instruction | output | input
---|---|---|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void restoreState(Object[] state) {
eventChunk = (ComplexEventChunk<ComplexEvent>) state[0];
//endOfChunk = (Boolean) state[1];
}
|
#vulnerable code
@Override
public void restoreState(Object[] state) {
eventChunk = (ComplexEventChunk<ComplexEvent>) state[0];
endOfChunk = (Boolean) state[1];
}
#location 4
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
joinLock.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER || eventType == ComplexEvent.Type.RESET) {
continue;
}
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
}
}
|
#vulnerable code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
joinLock.lock();
try {
if (streamEvent.getType() == ComplexEvent.Type.TIMER) {
if (preJoinProcessor) {
complexEventChunk.add(streamEvent);
nextProcessor.process(complexEventChunk);
complexEventChunk.clear();
}
continue;
} else if (streamEvent.getType() == ComplexEvent.Type.CURRENT) {
if (!preJoinProcessor) {
continue;
}
} else if (streamEvent.getType() == ComplexEvent.Type.EXPIRED) {
if (preJoinProcessor) {
continue;
}
} else if (streamEvent.getType() == ComplexEvent.Type.RESET) {
continue;
}
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
if (preJoinProcessor) {
complexEventChunk.add(streamEvent);
nextProcessor.process(complexEventChunk);
complexEventChunk.clear();
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
} else {
if (preJoinProcessor) {
joinLock.lock();
try {
nextProcessor.process(complexEventChunk);
} finally {
joinLock.unlock();
}
}
}
}
#location 17
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
joinLock.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
} else if (eventType == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, eventType));
}
} else {
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
} else {
if (preJoinProcessor) {
joinLock.lock();
try {
nextProcessor.process(complexEventChunk);
} finally {
joinLock.unlock();
}
}
}
}
|
#vulnerable code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
if (streamEvent.getType() == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, streamEvent.getType()));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, streamEvent.getType()));
}
selector.process(returnEventChunk);
returnEventChunk.clear();
continue;
}
joinLock.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
}
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
}
}
#location 37
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Object[] currentState() {
return new Object[]{eventChunk};
}
|
#vulnerable code
@Override
public Object[] currentState() {
return new Object[]{eventChunk, endOfChunk};
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
lock.lock();
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
long timeDifference = greatestTimestamp - minTimestamp;
if (timeDifference > k) {
if (timeDifference < MAX_K) {
k = timeDifference;
} else {
k = MAX_K;
}
}
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
}
} else {
if(expiredEventTreeMap.size() > 0) {
TreeMap<Long, ArrayList<StreamEvent>> expiredEventTreeMapSnapShot = expiredEventTreeMap;
expiredEventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
onTimerEvent(expiredEventTreeMapSnapShot, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
//This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
lock.unlock();
nextProcessor.process(complexEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
if ((greatestTimestamp - minTimestamp) > k) {
if ((greatestTimestamp - minTimestamp) < MAX_K) {
k = greatestTimestamp - minTimestamp;
} else {
k = MAX_K;
}
}
lock.lock();
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
lock.unlock();
}
} else {
onTimerEvent(expiredEventTreeMap, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
// This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
nextProcessor.process(complexEventChunk);
}
#location 13
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor, StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
while (streamEventChunk.hasNext()) {
ComplexEvent complexEvent = streamEventChunk.next();
Object[] inputData = new Object[attributeExpressionLength-paramPosition];
// Obtain x value that user wants to use to forecast Y
double xDash = ((Number) attributeExpressionExecutors[paramPosition-1].execute(complexEvent)).doubleValue();
for (int i = paramPosition; i < attributeExpressionLength; i++) {
inputData[i-paramPosition] = attributeExpressionExecutors[i].execute(complexEvent);
}
Object[] coefficients = regressionCalculator.calculateLinearRegression(inputData);
if (coefficients == null) {
streamEventChunk.remove();
} else {
Object[] outputData = new Object[coefficients.length+1];
System.arraycopy(coefficients, 0, outputData, 0, coefficients.length);
// Calculating forecast Y based on regression equation and given x
outputData[coefficients.length] = ((Number) coefficients[coefficients.length-2]).doubleValue() + ((Number) coefficients[coefficients.length-1]).doubleValue() * xDash;
complexEventPopulater.populateComplexEvent(complexEvent, outputData);
}
}
nextProcessor.process(streamEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor, StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
while (streamEventChunk.hasNext()) {
ComplexEvent complexEvent = streamEventChunk.next();
Object[] inputData = new Object[attributeExpressionLength-paramPosition];
double xDash = ((Number) attributeExpressionExecutors[paramPosition-1].execute(complexEvent)).doubleValue();
for (int i = paramPosition; i < attributeExpressionLength; i++) {
inputData[i-paramPosition] = attributeExpressionExecutors[i].execute(complexEvent);
}
Object[] temp = regressionCalculator.calculateLinearRegression(inputData);
Object[] outputData = new Object[temp.length+1];
System.arraycopy(temp, 0, outputData, 0, temp.length);
outputData[temp.length] = ((Number) temp[temp.length-2]).doubleValue() + ((Number) temp[temp.length-1]).doubleValue() * xDash;
if (outputData == null) {
streamEventChunk.remove();
} else {
complexEventPopulater.populateComplexEvent(complexEvent, outputData);
}
}
nextProcessor.process(streamEventChunk);
}
#location 12
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
lock.lock();
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
long timeDifference = greatestTimestamp - minTimestamp;
if (timeDifference > k) {
if (timeDifference < MAX_K) {
k = timeDifference;
} else {
k = MAX_K;
}
}
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
}
} else {
if(expiredEventTreeMap.size() > 0) {
TreeMap<Long, ArrayList<StreamEvent>> expiredEventTreeMapSnapShot = expiredEventTreeMap;
expiredEventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
onTimerEvent(expiredEventTreeMapSnapShot, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
//This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
lock.unlock();
nextProcessor.process(complexEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
if ((greatestTimestamp - minTimestamp) > k) {
if ((greatestTimestamp - minTimestamp) < MAX_K) {
k = greatestTimestamp - minTimestamp;
} else {
k = MAX_K;
}
}
lock.lock();
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
lock.unlock();
}
} else {
onTimerEvent(expiredEventTreeMap, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
// This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
nextProcessor.process(complexEventChunk);
}
#location 75
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
joinLockWrapper.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
} else if (eventType == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, eventType));
}
} else {
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
}
} finally {
joinLockWrapper.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
}
}
|
#vulnerable code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
joinLock.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
} else if (eventType == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, eventType));
}
} else {
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
} else {
if (preJoinProcessor) {
joinLock.lock();
try {
nextProcessor.process(complexEventChunk);
} finally {
joinLock.unlock();
}
}
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
lock.lock();
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
long timeDifference = greatestTimestamp - minTimestamp;
if (timeDifference > k) {
if (timeDifference < MAX_K) {
k = timeDifference;
} else {
k = MAX_K;
}
}
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
}
} else {
if(expiredEventTreeMap.size() > 0) {
TreeMap<Long, ArrayList<StreamEvent>> expiredEventTreeMapSnapShot = expiredEventTreeMap;
expiredEventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
onTimerEvent(expiredEventTreeMapSnapShot, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
//This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
lock.unlock();
nextProcessor.process(complexEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
if ((greatestTimestamp - minTimestamp) > k) {
if ((greatestTimestamp - minTimestamp) < MAX_K) {
k = greatestTimestamp - minTimestamp;
} else {
k = MAX_K;
}
}
lock.lock();
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
lock.unlock();
}
} else {
onTimerEvent(expiredEventTreeMap, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
// This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
nextProcessor.process(complexEventChunk);
}
#location 33
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected void processAndClear(int processIndex, StreamEvent streamEvent) {
ComplexEventChunk<StateEvent> retEventChunk = new ComplexEventChunk<StateEvent>(false);
ComplexEventChunk<StreamEvent> currentStreamEventChunk = new ComplexEventChunk<StreamEvent>(streamEvent, streamEvent, false);
ComplexEventChunk<StateEvent> eventChunk = ((StreamPreStateProcessor) nextProcessors[processIndex]).processAndReturn(currentStreamEventChunk);
if(eventChunk.getFirst() != null){
retEventChunk.add(eventChunk.getFirst());
}
eventChunk.clear();
if(querySelector!= null) {
while (retEventChunk.hasNext()) {
StateEvent stateEvent = retEventChunk.next();
retEventChunk.remove();
querySelector.process(new ComplexEventChunk<StateEvent>(stateEvent,stateEvent, false));
}
}
}
|
#vulnerable code
protected void processAndClear(int processIndex, StreamEvent streamEvent) {
ComplexEventChunk<StateEvent> retEventChunk = new ComplexEventChunk<StateEvent>(false);
ComplexEventChunk<StreamEvent> currentStreamEventChunk = new ComplexEventChunk<StreamEvent>(streamEvent, streamEvent, false);
synchronized (lockKey) {
ComplexEventChunk<StateEvent> eventChunk = ((StreamPreStateProcessor) nextProcessors[processIndex]).processAndReturn(currentStreamEventChunk);
if(eventChunk.getFirst() != null){
retEventChunk.add(eventChunk.getFirst());
}
eventChunk.clear();
}
if(querySelector!= null) {
while (retEventChunk.hasNext()) {
StateEvent stateEvent = retEventChunk.next();
retEventChunk.remove();
querySelector.process(new ComplexEventChunk<StateEvent>(stateEvent,stateEvent, false));
}
}
}
#location 16
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
lock.lock();
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
long timeDifference = greatestTimestamp - minTimestamp;
if (timeDifference > k) {
if (timeDifference < MAX_K) {
k = timeDifference;
} else {
k = MAX_K;
}
}
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
}
} else {
if(expiredEventTreeMap.size() > 0) {
TreeMap<Long, ArrayList<StreamEvent>> expiredEventTreeMapSnapShot = expiredEventTreeMap;
expiredEventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
onTimerEvent(expiredEventTreeMapSnapShot, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
//This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
lock.unlock();
nextProcessor.process(complexEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
if ((greatestTimestamp - minTimestamp) > k) {
if ((greatestTimestamp - minTimestamp) < MAX_K) {
k = greatestTimestamp - minTimestamp;
} else {
k = MAX_K;
}
}
lock.lock();
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
lock.unlock();
}
} else {
onTimerEvent(expiredEventTreeMap, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
// This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
nextProcessor.process(complexEventChunk);
}
#location 15
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void run() {
final Lock consumerLock = this.consumerLock;
while (!inactive) {
while (!paused) {
// The time, in milliseconds, spent waiting in poll if data is not available. If 0, returns
// immediately with any records that are available now. Must not be negative
ConsumerRecords<byte[], byte[]> records;
try {
consumerLock.lock();
records = consumer.poll(100);
} finally {
consumerLock.unlock();
}
for (ConsumerRecord record : records) {
String event = record.value().toString();
if (log.isDebugEnabled()) {
log.debug("Event received in Kafka Event Adaptor: " + event + ", offSet: " + record.offset()
+ ", key: " + record.key() + ", topic: " + record.topic() + ", partition: " + record
.partition());
}
topicOffsetMap.get(record.topic()).put(record.partition(), record.offset());
sourceEventListener.onEvent(event);
}
try {
consumerLock.lock();
if (!records.isEmpty()) {
consumer.commitAsync();
}
} catch (CommitFailedException e) {
log.error("Kafka commit failed for topic kafka_result_topic", e);
} finally {
consumerLock.unlock();
}
}
}
consumerLock.lock();
consumer.close();
consumerLock.unlock();
}
|
#vulnerable code
@Override
public void run() {
while (!inactive) {
while (!paused) {
// The time, in milliseconds, spent waiting in poll if data is not available. If 0, returns
// immediately with any records that are available now. Must not be negative
ConsumerRecords<byte[], byte[]> records = consumer.poll(200);
for (ConsumerRecord record : records) {
String event = record.value().toString();
if (log.isDebugEnabled()) {
log.debug("Event received in Kafka Event Adaptor: " + event + ", offSet: " + record.offset() +
", key: " + record.key() + ", topic: " + record.topic() + ", partition: " + record
.partition());
}
topicOffsetMap.get(record.topic()).put(record.partition(), record.offset());
sourceEventListener.onEvent(event);
}
try {
if (!records.isEmpty()) {
consumer.commitAsync();
}
} catch (CommitFailedException e) {
log.error("Kafka commit failed for topic kafka_result_topic", e);
}
}
try {
Thread.sleep(1);
} catch (InterruptedException ignore) {
}
}
consumer.close();
}
#location 15
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
lock.lock();
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
long timeDifference = greatestTimestamp - minTimestamp;
if (timeDifference > k) {
if (timeDifference < MAX_K) {
k = timeDifference;
} else {
k = MAX_K;
}
}
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
}
} else {
if(expiredEventTreeMap.size() > 0) {
TreeMap<Long, ArrayList<StreamEvent>> expiredEventTreeMapSnapShot = expiredEventTreeMap;
expiredEventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
onTimerEvent(expiredEventTreeMapSnapShot, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
//This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
lock.unlock();
nextProcessor.process(complexEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
if ((greatestTimestamp - minTimestamp) > k) {
if ((greatestTimestamp - minTimestamp) < MAX_K) {
k = greatestTimestamp - minTimestamp;
} else {
k = MAX_K;
}
}
lock.lock();
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
lock.unlock();
}
} else {
onTimerEvent(expiredEventTreeMap, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
// This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
nextProcessor.process(complexEventChunk);
}
#location 74
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
joinLock.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
} else if (eventType == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, eventType));
}
} else {
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
} else {
if (preJoinProcessor) {
joinLock.lock();
try {
nextProcessor.process(complexEventChunk);
} finally {
joinLock.unlock();
}
}
}
}
|
#vulnerable code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
if (streamEvent.getType() == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, streamEvent.getType()));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, streamEvent.getType()));
}
selector.process(returnEventChunk);
returnEventChunk.clear();
continue;
}
joinLock.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
}
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
}
}
#location 35
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor, StreamEventCloner streamEventCloner) {
while (streamEventChunk.hasNext()) {
StreamEvent streamEvent = streamEventChunk.next();
StreamEvent clonedEvent = streamEventCloner.copyStreamEvent(streamEvent);
clonedEvent.setType(StreamEvent.Type.EXPIRED);
if (count < length) {
count++;
this.expiredEventChunk.add(clonedEvent);
} else {
StreamEvent firstEvent = this.expiredEventChunk.poll();
if(firstEvent!=null) {
streamEventChunk.insertBeforeCurrent(firstEvent);
this.expiredEventChunk.add(clonedEvent);
}
else {
streamEventChunk.insertBeforeCurrent(clonedEvent);
}
}
}
nextProcessor.process(streamEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor, StreamEventCloner streamEventCloner) {
while (streamEventChunk.hasNext()) {
StreamEvent streamEvent = streamEventChunk.next();
StreamEvent clonedEvent = streamEventCloner.copyStreamEvent(streamEvent);
clonedEvent.setType(StreamEvent.Type.EXPIRED);
if (count < length) {
count++;
this.expiredEventChunk.add(clonedEvent);
} else {
StreamEvent firstEvent = this.expiredEventChunk.poll();
streamEventChunk.insertBeforeCurrent(firstEvent);
this.expiredEventChunk.add(clonedEvent);
}
}
nextProcessor.process(streamEventChunk);
}
#location 12
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
lock.lock();
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
long timeDifference = greatestTimestamp - minTimestamp;
if (timeDifference > k) {
if (timeDifference < MAX_K) {
k = timeDifference;
} else {
k = MAX_K;
}
}
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
}
} else {
if(expiredEventTreeMap.size() > 0) {
TreeMap<Long, ArrayList<StreamEvent>> expiredEventTreeMapSnapShot = expiredEventTreeMap;
expiredEventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
onTimerEvent(expiredEventTreeMapSnapShot, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
//This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
lock.unlock();
nextProcessor.process(complexEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
if ((greatestTimestamp - minTimestamp) > k) {
if ((greatestTimestamp - minTimestamp) < MAX_K) {
k = greatestTimestamp - minTimestamp;
} else {
k = MAX_K;
}
}
lock.lock();
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
lock.unlock();
}
} else {
onTimerEvent(expiredEventTreeMap, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
// This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
nextProcessor.process(complexEventChunk);
}
#location 32
#vulnerability type THREAD_SAFETY_VIOLATION
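Note: the fix above moves the lock acquisition outside the per-event branching so that every read and mutation of the shared tree maps happens inside one critical section, and the timer branch swaps in a fresh map before draining the snapshot. A minimal, generic sketch of that locking pattern (hypothetical ReentrantLock field and buffer, not the project's actual classes):

import java.util.TreeMap;
import java.util.concurrent.locks.ReentrantLock;

class OrderedBufferSketch {
    private final ReentrantLock lock = new ReentrantLock();
    private TreeMap<Long, String> buffer = new TreeMap<>();   // shared mutable state

    void onEvent(long timestamp, String payload) {
        lock.lock();
        try {
            // every read/write of the shared map stays inside the same critical section
            buffer.put(timestamp, payload);
        } finally {
            lock.unlock();                                     // try/finally so the lock is released even on exceptions
        }
    }

    void onTimer() {
        TreeMap<Long, String> snapshot;
        lock.lock();
        try {
            snapshot = buffer;                                 // swap the shared map atomically
            buffer = new TreeMap<>();
        } finally {
            lock.unlock();
        }
        // drain the now-private snapshot outside the lock
        snapshot.forEach((ts, p) -> System.out.println(ts + " -> " + p));
    }
}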
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
lock.lock();
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
long timeDifference = greatestTimestamp - minTimestamp;
if (timeDifference > k) {
if (timeDifference < MAX_K) {
k = timeDifference;
} else {
k = MAX_K;
}
}
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
}
} else {
if(expiredEventTreeMap.size() > 0) {
TreeMap<Long, ArrayList<StreamEvent>> expiredEventTreeMapSnapShot = expiredEventTreeMap;
expiredEventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
onTimerEvent(expiredEventTreeMapSnapShot, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
//This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
lock.unlock();
nextProcessor.process(complexEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
if ((greatestTimestamp - minTimestamp) > k) {
if ((greatestTimestamp - minTimestamp) < MAX_K) {
k = greatestTimestamp - minTimestamp;
} else {
k = MAX_K;
}
}
lock.lock();
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
lock.unlock();
}
} else {
onTimerEvent(expiredEventTreeMap, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
// This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
nextProcessor.process(complexEventChunk);
}
#location 9
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
joinLockWrapper.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
} else if (eventType == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, eventType));
}
} else {
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
}
} finally {
joinLockWrapper.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
}
}
|
#vulnerable code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
joinLock.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
} else if (eventType == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, eventType));
}
} else {
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
} else {
if (preJoinProcessor) {
joinLock.lock();
try {
nextProcessor.process(complexEventChunk);
} finally {
joinLock.unlock();
}
}
}
}
#location 49
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
joinLock.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
} else if (eventType == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, eventType));
}
} else {
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
} else {
if (preJoinProcessor) {
joinLock.lock();
try {
nextProcessor.process(complexEventChunk);
} finally {
joinLock.unlock();
}
}
}
}
|
#vulnerable code
@Override
public void process(ComplexEventChunk complexEventChunk) {
if (trigger) {
ComplexEventChunk<StateEvent> returnEventChunk = new ComplexEventChunk<StateEvent>(true);
StateEvent joinStateEvent = new StateEvent(2, 0);
StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
complexEventChunk.clear();
while (nextEvent != null) {
StreamEvent streamEvent = nextEvent;
nextEvent = streamEvent.getNext();
streamEvent.setNext(null);
if (streamEvent.getType() == ComplexEvent.Type.RESET) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(null, streamEvent, streamEvent.getType()));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, null, streamEvent.getType()));
}
selector.process(returnEventChunk);
returnEventChunk.clear();
continue;
}
joinLock.lock();
try {
ComplexEvent.Type eventType = streamEvent.getType();
if (eventType == ComplexEvent.Type.TIMER) {
continue;
}
joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
StreamEvent foundStreamEvent = findableProcessor.find(joinStateEvent, finder);
joinStateEvent.setEvent(matchingStreamIndex, null);
if (foundStreamEvent == null) {
if (outerJoinProcessor && !leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else if (outerJoinProcessor && leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
} else {
while (foundStreamEvent != null) {
if (!leftJoinProcessor) {
returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
} else {
returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
}
foundStreamEvent = foundStreamEvent.getNext();
}
}
} finally {
joinLock.unlock();
}
if (returnEventChunk.getFirst() != null) {
selector.process(returnEventChunk);
returnEventChunk.clear();
}
}
}
}
#location 18
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
lock.lock();
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
long timeDifference = greatestTimestamp - minTimestamp;
if (timeDifference > k) {
if (timeDifference < MAX_K) {
k = timeDifference;
} else {
k = MAX_K;
}
}
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
}
} else {
if(expiredEventTreeMap.size() > 0) {
TreeMap<Long, ArrayList<StreamEvent>> expiredEventTreeMapSnapShot = expiredEventTreeMap;
expiredEventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
onTimerEvent(expiredEventTreeMapSnapShot, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
//This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
lock.unlock();
nextProcessor.process(complexEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
if ((greatestTimestamp - minTimestamp) > k) {
if ((greatestTimestamp - minTimestamp) < MAX_K) {
k = greatestTimestamp - minTimestamp;
} else {
k = MAX_K;
}
}
lock.lock();
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
lock.unlock();
}
} else {
onTimerEvent(expiredEventTreeMap, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
// This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
nextProcessor.process(complexEventChunk);
}
#location 39
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
lock.lock();
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
long timeDifference = greatestTimestamp - minTimestamp;
if (timeDifference > k) {
if (timeDifference < MAX_K) {
k = timeDifference;
} else {
k = MAX_K;
}
}
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
}
} else {
if(expiredEventTreeMap.size() > 0) {
TreeMap<Long, ArrayList<StreamEvent>> expiredEventTreeMapSnapShot = expiredEventTreeMap;
expiredEventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
onTimerEvent(expiredEventTreeMapSnapShot, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
//This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
lock.unlock();
nextProcessor.process(complexEventChunk);
}
|
#vulnerable code
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
try {
while (streamEventChunk.hasNext()) {
StreamEvent event = streamEventChunk.next();
if(event.getType() != ComplexEvent.Type.TIMER) {
streamEventChunk.remove(); //We might have the rest of the events linked to this event forming a chain.
long timestamp = (Long) timestampExecutor.execute(event);
if (expireFlag) {
if (timestamp < lastSentTimeStamp) {
continue;
}
}
ArrayList<StreamEvent> eventList = eventTreeMap.get(timestamp);
if (eventList == null) {
eventList = new ArrayList<StreamEvent>();
}
eventList.add(event);
eventTreeMap.put(timestamp, eventList);
if (timestamp > greatestTimestamp) {
greatestTimestamp = timestamp;
long minTimestamp = eventTreeMap.firstKey();
if ((greatestTimestamp - minTimestamp) > k) {
if ((greatestTimestamp - minTimestamp) < MAX_K) {
k = greatestTimestamp - minTimestamp;
} else {
k = MAX_K;
}
}
lock.lock();
Iterator<Map.Entry<Long, ArrayList<StreamEvent>>> entryIterator = eventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
ArrayList<StreamEvent> list = expiredEventTreeMap.get(entry.getKey());
if (list != null) {
list.addAll(entry.getValue());
} else {
expiredEventTreeMap.put(entry.getKey(), entry.getValue());
}
}
eventTreeMap = new TreeMap<Long, ArrayList<StreamEvent>>();
entryIterator = expiredEventTreeMap.entrySet().iterator();
while (entryIterator.hasNext()) {
Map.Entry<Long, ArrayList<StreamEvent>> entry = entryIterator.next();
if (entry.getKey() + k <= greatestTimestamp) {
entryIterator.remove();
ArrayList<StreamEvent> timeEventList = entry.getValue();
lastSentTimeStamp = entry.getKey();
for (StreamEvent aTimeEventList : timeEventList) {
complexEventChunk.add(aTimeEventList);
}
}
}
lock.unlock();
}
} else {
onTimerEvent(expiredEventTreeMap, nextProcessor);
lastScheduledTimestamp = lastScheduledTimestamp + TIMER_DURATION;
scheduler.notifyAt(lastScheduledTimestamp);
}
}
} catch (ArrayIndexOutOfBoundsException ec) {
// This happens due to user specifying an invalid field index.
throw new ExecutionPlanCreationException("The very first parameter must be an Integer with a valid " +
" field index (0 to (fieldsLength-1)).");
}
nextProcessor.process(complexEventChunk);
}
#location 76
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException {
LOGGER.info("CSP-Reporting-Servlet");
try (BufferedReader reader = new BufferedReader(new InputStreamReader(request.getInputStream()))) {
StringBuilder responseBuilder = new StringBuilder();
String inputStr;
while ((inputStr = reader.readLine()) != null) {
responseBuilder.append(inputStr);
}
LOGGER.info("REPORT " + responseBuilder.toString());
JSONObject json = new JSONObject(responseBuilder.toString());
JSONObject cspReport = json.getJSONObject("csp-report");
LOGGER.info("document-uri: " + cspReport.getString("document-uri"));
LOGGER.info("referrer: " + cspReport.getString("referrer"));
LOGGER.info("blocked-uri: " + cspReport.getString("blocked-uri"));
LOGGER.info("violated-directive: " + cspReport.getString("violated-directive"));
LOGGER.info("source-file: " + cspReport.getString("source-file"));
LOGGER.info("script-sample: " + cspReport.getString("script-sample"));
LOGGER.info("line-number: " + cspReport.getString("line-number"));
} catch (IOException | JSONException ex) {
LOGGER.error(ex.getMessage(), ex);
}
}
|
#vulnerable code
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException {
LOGGER.info("CSP-Reporting-Servlet");
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(request.getInputStream()));
StringBuilder responseBuilder = new StringBuilder();
String inputStr;
while ((inputStr = reader.readLine()) != null) {
responseBuilder.append(inputStr);
}
LOGGER.info("REPORT " + responseBuilder.toString());
JSONObject json = new JSONObject(responseBuilder.toString());
JSONObject cspReport = json.getJSONObject("csp-report");
LOGGER.info("document-uri: " + cspReport.getString("document-uri"));
LOGGER.info("referrer: " + cspReport.getString("referrer"));
LOGGER.info("blocked-uri: " + cspReport.getString("blocked-uri"));
LOGGER.info("violated-directive: " + cspReport.getString("violated-directive"));
LOGGER.info("source-file: " + cspReport.getString("source-file"));
LOGGER.info("script-sample: " + cspReport.getString("script-sample"));
LOGGER.info("line-number: " + cspReport.getString("line-number"));
} catch (IOException | JSONException ex) {
LOGGER.error(ex.getMessage(), ex);
}
}
#location 24
#vulnerability type RESOURCE_LEAK
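Note: the fixed doPost closes the request reader via try-with-resources, which the original omitted (the BufferedReader/InputStreamReader chain was never closed). A stripped-down sketch of the same pattern over a plain InputStream, with an explicit charset added as an assumption:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

class BodyReaderSketch {
    static String readBody(InputStream in) throws IOException {
        // try-with-resources closes the reader (and the wrapped stream) on every exit path
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(in, StandardCharsets.UTF_8))) {
            StringBuilder body = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                body.append(line);
            }
            return body.toString();
        }
    }
}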
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected void registerWithLongPollingServer(AsyncBrowserSession bs) {
JUnitSession.getInstance().getDependency(HttpLongPollingServer.class).registerBrowserSession(bs);
}
|
#vulnerable code
protected void registerWithLongPollingServer(AsyncBrowserSession bs) {
JUnitSession.getInstance().getDependency(AsyncServerSession.class).registerBrowserSession(bs);
}
#location 2
#vulnerability type NULL_DEREFERENCE
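Note: the crash here came from looking up a dependency type that was never registered, so getDependency(...) returned null and the chained call dereferenced it. When such a lookup is not guaranteed to succeed, failing fast with a message that names the missing type is safer than letting the NPE surface later; a generic sketch (hypothetical registry map, not the test framework's API):

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

class DependencyRegistrySketch {
    private final Map<Class<?>, Object> dependencies = new HashMap<>();

    <T> void register(Class<T> type, T instance) {
        dependencies.put(type, instance);
    }

    <T> T require(Class<T> type) {
        Object found = dependencies.get(type);
        // fail fast with a message naming the missing type instead of a bare NullPointerException later
        return type.cast(Objects.requireNonNull(found,
                () -> "No dependency registered for " + type.getName()));
    }
}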
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public CodeGenerator newCodeGenerator(final AviatorClassLoader classLoader) {
switch (getOptimizeLevel()) {
case AviatorEvaluator.COMPILE:
ASMCodeGenerator asmCodeGenerator =
new ASMCodeGenerator(this, classLoader, this.traceOutputStream);
asmCodeGenerator.start();
return asmCodeGenerator;
case AviatorEvaluator.EVAL:
return new OptimizeCodeGenerator(this, classLoader, this.traceOutputStream);
default:
throw new IllegalArgumentException("Unknow option " + getOptimizeLevel());
}
}
|
#vulnerable code
public CodeGenerator newCodeGenerator(final AviatorClassLoader classLoader) {
switch (getOptimizeLevel()) {
case AviatorEvaluator.COMPILE:
ASMCodeGenerator asmCodeGenerator = new ASMCodeGenerator(this, classLoader,
this.traceOutputStream, getOptionValue(Options.TRACE).bool);
asmCodeGenerator.start();
return asmCodeGenerator;
case AviatorEvaluator.EVAL:
return new OptimizeCodeGenerator(this, classLoader, this.traceOutputStream,
getOptionValue(Options.TRACE).bool);
default:
throw new IllegalArgumentException("Unknow option " + getOptimizeLevel());
}
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected Env genTopEnv(Map<String, Object> map) {
Env env = newEnv(map,
this.instance.getOption(Options.USE_USER_ENV_AS_TOP_ENV_DIRECTLY) == Boolean.TRUE);
if (this.compileEnv != null && !this.compileEnv.isEmpty()) {
env.putAll(this.compileEnv);
}
return env;
}
|
#vulnerable code
protected Env genTopEnv(Map<String, Object> map) {
Env env =
newEnv(map, (boolean) this.instance.getOption(Options.USE_USER_ENV_AS_TOP_ENV_DIRECTLY));
if (this.compileEnv != null && !this.compileEnv.isEmpty()) {
env.putAll(this.compileEnv);
}
return env;
}
#location 3
#vulnerability type NULL_DEREFERENCE
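Note: the vulnerable version auto-unboxes the option value, so a missing option (null Boolean) throws a NullPointerException; the fix treats null as false by comparing against Boolean.TRUE. A tiny self-contained illustration of the null-safe check (hypothetical option map, not the evaluator's API; Boolean.TRUE.equals behaves like the fix's identity comparison for this purpose):

import java.util.HashMap;
import java.util.Map;

class BooleanOptionSketch {
    public static void main(String[] args) {
        Map<String, Boolean> options = new HashMap<>();        // option deliberately absent
        Boolean raw = options.get("useUserEnvAsTopEnv");       // null when unset
        boolean enabled = Boolean.TRUE.equals(raw);            // null-safe: absent counts as false
        // boolean crash = (boolean) raw;                      // would throw NullPointerException here
        System.out.println("enabled = " + enabled);
    }
}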
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void generateMergedFile(String filename) {
File origFile = originalCodebase.getFile(filename);
boolean origExists = filesystem.exists(origFile);
File destFile = destinationCodebase.getFile(filename);
boolean destExists = filesystem.exists(destFile);
File modFile = modifiedCodebase.getFile(filename);
boolean modExists = filesystem.exists(modFile);
if (!destExists && !modExists) {
// This should never be thrown since generateMergedFile(...) is only called on filesToMerge
// from merge() which is the union of the files in the destination and modified codebases.
throw new MoeProblem(
"%s doesn't exist in either %s nor %s. This should not be possible.",
filename,
destinationCodebase,
modifiedCodebase);
} else if (origExists && modExists && !destExists) {
if (areDifferent(filename, origFile, modFile)) {
// Proceed and merge in /dev/null, which should produce a merge conflict (incoming edit on
// delete).
destFile = new File("/dev/null");
} else {
// Defer to deletion in destination codebase.
return;
}
} else if (origExists && !modExists && destExists) {
// Blindly follow deletion of the original file by not copying it into the merged codebase.
return;
} else if (!origExists && !(modExists && destExists)) {
// File exists only in modified or destination codebase, so just copy it over.
File existingFile = (modExists ? modFile : destFile);
copyToMergedCodebase(filename, existingFile);
return;
} else if (!origExists && modExists && destExists) {
// Merge both new files (conflict expected).
origFile = new File("/dev/null");
}
File mergedFile = copyToMergedCodebase(filename, destFile);
try {
// Merges the changes that lead from origFile to modFile into mergedFile (which is a copy
// of destFile). After, mergedFile will have the combined changes of modFile and destFile.
cmd.runCommand(
"merge",
ImmutableList.of(
mergedFile.getAbsolutePath(), origFile.getAbsolutePath(), modFile.getAbsolutePath()),
this.mergedCodebase.getPath().getAbsolutePath());
// Return status was 0 and the merge was successful. Note it.
mergedFiles.add(mergedFile.getAbsolutePath());
} catch (CommandException e) {
// If merge fails with exit status 1, then a conflict occurred. Make a note of the filepath.
if (e.returnStatus == 1) {
failedToMergeFiles.add(mergedFile.getAbsolutePath());
} else {
throw new MoeProblem(
"Merge returned with unexpected status %d when trying to run \"merge -p %s %s %s\"",
e.returnStatus,
destFile.getAbsolutePath(),
origFile.getAbsolutePath(),
modFile.getAbsolutePath());
}
}
}
|
#vulnerable code
public void generateMergedFile(String filename) {
FileSystem fs = Injector.INSTANCE.fileSystem();
File origFile = originalCodebase.getFile(filename);
boolean origExists = fs.exists(origFile);
File destFile = destinationCodebase.getFile(filename);
boolean destExists = fs.exists(destFile);
File modFile = modifiedCodebase.getFile(filename);
boolean modExists = fs.exists(modFile);
if (!destExists && !modExists) {
// This should never be thrown since generateMergedFile(...) is only called on filesToMerge
// from merge() which is the union of the files in the destination and modified codebases.
throw new MoeProblem(
"%s doesn't exist in either %s nor %s. This should not be possible.",
filename,
destinationCodebase,
modifiedCodebase);
} else if (origExists && modExists && !destExists) {
if (areDifferent(filename, origFile, modFile)) {
// Proceed and merge in /dev/null, which should produce a merge conflict (incoming edit on
// delete).
destFile = new File("/dev/null");
} else {
// Defer to deletion in destination codebase.
return;
}
} else if (origExists && !modExists && destExists) {
// Blindly follow deletion of the original file by not copying it into the merged codebase.
return;
} else if (!origExists && !(modExists && destExists)) {
// File exists only in modified or destination codebase, so just copy it over.
File existingFile = (modExists ? modFile : destFile);
copyToMergedCodebase(filename, existingFile);
return;
} else if (!origExists && modExists && destExists) {
// Merge both new files (conflict expected).
origFile = new File("/dev/null");
}
File mergedFile = copyToMergedCodebase(filename, destFile);
try {
// Merges the changes that lead from origFile to modFile into mergedFile (which is a copy
// of destFile). After, mergedFile will have the combined changes of modFile and destFile.
Injector.INSTANCE
.cmd()
.runCommand(
"merge",
ImmutableList.of(
mergedFile.getAbsolutePath(),
origFile.getAbsolutePath(),
modFile.getAbsolutePath()),
this.mergedCodebase.getPath().getAbsolutePath());
// Return status was 0 and the merge was successful. Note it.
mergedFiles.add(mergedFile.getAbsolutePath());
} catch (CommandException e) {
// If merge fails with exit status 1, then a conflict occurred. Make a note of the filepath.
if (e.returnStatus == 1) {
failedToMergeFiles.add(mergedFile.getAbsolutePath());
} else {
throw new MoeProblem(
"Merge returned with unexpected status %d when trying to run \"merge -p %s %s %s\"",
e.returnStatus,
destFile.getAbsolutePath(),
origFile.getAbsolutePath(),
modFile.getAbsolutePath());
}
}
}
#location 5
#vulnerability type NULL_DEREFERENCE
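Note: the fix replaces the static Injector.INSTANCE lookups with collaborators held in fields (filesystem, cmd), which removes the chance of dereferencing a null service-locator result in the middle of a merge. A generic sketch of validating injected collaborators once, up front (hypothetical interfaces, not the project's real types):

import java.util.Objects;

class FileMergerSketch {
    interface FileSystem { boolean exists(String path); }
    interface CommandRunner { int run(String... args); }

    private final FileSystem filesystem;
    private final CommandRunner cmd;

    FileMergerSketch(FileSystem filesystem, CommandRunner cmd) {
        // validate at construction instead of trusting a global locator at every call site
        this.filesystem = Objects.requireNonNull(filesystem, "filesystem");
        this.cmd = Objects.requireNonNull(cmd, "cmd");
    }

    void merge(String path) {
        if (filesystem.exists(path)) {
            cmd.run("merge", path);
        }
    }
}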
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testUpdate() {
redisSessionDAO.doCreate(session1);
redisSessionDAO.doReadSession(session1.getId());
doChangeSessionName(session1, name1);
redisSessionDAO.update(session1);
FakeSession actualSession = (FakeSession)redisSessionDAO.doReadSession(session1.getId());
assertEquals(actualSession.getName(), name1);
}
|
#vulnerable code
@Test
public void testUpdate() {
redisSessionDAO.doCreate(session1);
doChangeSessionName(session1, name1);
redisSessionDAO.update(session1);
FakeSession actualSession = (FakeSession)redisSessionDAO.doReadSession(session1.getId());
assertEquals(actualSession.getName(), name1);
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testRemove() throws SerializationException {
FakeAuth nullValue = redisCache.remove(null);
assertThat(nullValue, is(nullValue()));
String testKey = "billy";
byte[] testKeyBytes = keySerializer.serialize(testPrefix + testKey);
FakeAuth testValue = new FakeAuth(3, "client");
byte[] testValueBytes = valueSerializer.serialize(testValue);
when(redisManager.get(testKeyBytes)).thenReturn(testValueBytes);
FakeAuth actualValue = redisCache.remove(testKey);
assertThat(actualValue.getId(), is(3));
assertThat(actualValue.getRole(), is("client"));
}
|
#vulnerable code
@Test
public void testRemove() {
redisCache.remove(null);
FakeSession actualValue = redisCache.remove(testKey);
assertThat(actualValue.getId(), is(3));
assertThat(actualValue.getName(), is("jack"));
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testDoReadSession() throws NoSuchFieldException, IllegalAccessException {
Session nullSession = redisSessionDAO.doReadSession(null);
assertThat(nullSession, is(nullValue()));
RedisSessionDAO redisSessionDAO2 = new RedisSessionDAO();
redisSessionDAO2.setRedisManager(redisManager);
redisSessionDAO2.setKeyPrefix(testPrefix);
redisSessionDAO2.setExpire(2);
ThreadLocal sessionsInThread = mock(ThreadLocal.class);
Map<Serializable, SessionInMemory> sessionMap = new HashMap<Serializable, SessionInMemory>();
SessionInMemory sessionInMemory = new SessionInMemory();
sessionInMemory.setSession(new FakeSession(1, "Billy"));
sessionInMemory.setCreateTime(new Date());
sessionMap.put("1", sessionInMemory);
when(sessionsInThread.get()).thenReturn(sessionMap);
TestUtils.setPrivateField(redisSessionDAO2, "sessionsInThread", sessionsInThread);
FakeSession actualSession = (FakeSession)redisSessionDAO2.doReadSession("1");
assertThat(actualSession.getId().toString(), is("1"));
assertThat(actualSession.getName(), is("Billy"));
verify(redisManager, times(0)).get(any((new byte[0]).getClass()));
}
|
#vulnerable code
@Test
public void testDoReadSession() {
Session actualSession = redisSessionDAO.doReadSession(testKey);
assertThat(actualSession.getId().toString(), is("3"));
redisSessionDAO.doReadSession(null);
}
#location 4
#vulnerability type NULL_DEREFERENCE
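Note: the patched tests in these entries arrange the backing state first (via doCreate/doReadSession or a stubbed in-thread cache) so the value they assert on cannot be null. A minimal Mockito-style sketch of arranging a stub before dereferencing the result, using a hypothetical store interface rather than the DAO under test:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

class SessionReadSketch {
    interface SessionStore { Object get(String key); }

    static Object readBack() {
        SessionStore store = mock(SessionStore.class);
        when(store.get("session:1")).thenReturn("Billy");    // arrange: the key exists before we read it
        Object value = store.get("session:1");               // act
        if (value == null) {                                  // guard rather than dereferencing blindly
            throw new IllegalStateException("session:1 was never stored");
        }
        return value;
    }
}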
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void writeFileTxt(String fileName, String[] totalFile){
FileWriter file = null;
PrintWriter pw = null;
try
{
file = new FileWriter(System.getProperty("user.dir")+"/"+fileName);
pw = new PrintWriter(file);
for (int i = 0; i < totalFile.length; i++)
pw.println(totalFile[i]);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
// Execute the "finally" to make sure the file is closed
if (null != file)
file.close();
} catch (Exception e2) {
e2.printStackTrace();
}
try {
if (pw != null)
pw.close();
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
|
#vulnerable code
public static void writeFileTxt(String fileName, String[] totalFile){
FileWriter file = null;
PrintWriter pw = null;
try
{
file = new FileWriter(System.getProperty("user.dir")+"/"+fileName);
pw = new PrintWriter(file);
for (int i = 0; i < totalFile.length; i++)
pw.println(totalFile[i]);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
// Execute the "finally" to make sure the file is closed
if (null != file)
file.close();
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
#location 17
#vulnerability type RESOURCE_LEAK
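Note: the patched writeFileTxt closes the PrintWriter as well as the FileWriter in the finally block; with try-with-resources the same method shrinks considerably, because both writers are closed automatically in reverse order. A hedged sketch of that alternative shape (same behaviour, not necessarily the project's chosen style):

import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

class WriteFileSketch {
    static void writeLines(String fileName, String[] lines) {
        // resources declared here are closed automatically, innermost first, even on exceptions
        try (FileWriter file = new FileWriter(System.getProperty("user.dir") + "/" + fileName);
             PrintWriter pw = new PrintWriter(file)) {
            for (String line : lines) {
                pw.println(line);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}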
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void writeFileTxt(String fileName, String[] totalFile){
FileWriter file = null;
PrintWriter pw = null;
try
{
file = new FileWriter(System.getProperty("user.dir")+"/"+fileName);
pw = new PrintWriter(file);
for (int i = 0; i < totalFile.length; i++)
pw.println(totalFile[i]);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
// Execute the "finally" to make sure the file is closed
if (null != file)
file.close();
} catch (Exception e2) {
e2.printStackTrace();
}
try {
if (pw != null)
pw.close();
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
|
#vulnerable code
public static void writeFileTxt(String fileName, String[] totalFile){
FileWriter file = null;
PrintWriter pw = null;
try
{
file = new FileWriter(System.getProperty("user.dir")+"/"+fileName);
pw = new PrintWriter(file);
for (int i = 0; i < totalFile.length; i++)
pw.println(totalFile[i]);
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
// Execute the "finally" to make sure the file is closed
if (null != file)
file.close();
} catch (Exception e2) {
e2.printStackTrace();
}
}
}
#location 18
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void recoveryFromLog() {
synchronized (mDatasets) {
recoveryFromFile(Config.MASTER_CHECKPOINT_FILE, "Master Checkpoint file ");
recoveryFromFile(Config.MASTER_LOG_FILE, "Master Log file ");
}
}
|
#vulnerable code
private void recoveryFromLog() {
MasterLogReader reader;
synchronized (mDatasets) {
File file = new File(Config.MASTER_CHECKPOINT_FILE);
if (!file.exists()) {
LOG.info("Master Checkpoint file " + Config.MASTER_CHECKPOINT_FILE + " does not exist.");
} else {
reader = new MasterLogReader(Config.MASTER_CHECKPOINT_FILE);
while (reader.hasNext()) {
DatasetInfo dataset = reader.getNextDatasetInfo();
mDatasets.put(dataset.mId, dataset);
mDatasetPathToId.put(dataset.mPath, dataset.mId);
}
}
file = new File(Config.MASTER_LOG_FILE);
if (!file.exists()) {
LOG.info("Master Log file " + Config.MASTER_LOG_FILE + " does not exist.");
} else {
reader = new MasterLogReader(Config.MASTER_LOG_FILE);
while (reader.hasNext()) {
DatasetInfo dataset = reader.getNextDatasetInfo();
if (dataset.mId > 0) {
mDatasets.put(dataset.mId, dataset);
mDatasetPathToId.put(dataset.mPath, dataset.mId);
} else {
mDatasets.remove(-dataset.mId);
mDatasetPathToId.remove(dataset.mPath);
}
}
}
}
}
#location 13
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public JSONObject getInfo() throws EvtSdkException {
Info info = new Info();
return info.get(RequestParams.of(netParams));
}
|
#vulnerable code
public JSONObject getInfo() throws EvtSdkException {
Info info = new Info();
return info.get(netParams, null);
}
#location 3
#vulnerability type NULL_DEREFERENCE
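Note: the fix funnels the call through RequestParams.of(netParams) instead of passing a bare null second argument. The snippet does not show that factory's body, so the following is only a guess at the shape of such a parameter object: a static factory that validates its input once, so downstream code never has to null-check:

import java.util.Objects;

final class RequestParamsSketch {
    private final Object netParams;

    private RequestParamsSketch(Object netParams) {
        this.netParams = netParams;
    }

    static RequestParamsSketch of(Object netParams) {
        // validate at the single construction point instead of at every call site
        return new RequestParamsSketch(Objects.requireNonNull(netParams, "netParams"));
    }

    Object netParams() {
        return netParams;
    }
}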
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public JSONObject getHeadBlockHeaderState() throws EvtSdkException {
HeadBlockHeaderState headBlockHeaderState = new HeadBlockHeaderState();
return headBlockHeaderState.get(RequestParams.of(netParams));
}
|
#vulnerable code
public JSONObject getHeadBlockHeaderState() throws EvtSdkException {
HeadBlockHeaderState headBlockHeaderState = new HeadBlockHeaderState();
return headBlockHeaderState.get(netParams, null);
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static float[] getMedianErrorRates(LangDescriptor language, int maxNumFiles, int trials) throws Exception {
SubsetValidator validator = new SubsetValidator(language.corpusDir, language);
List<InputDocument> documents = load(validator.allFiles, language);
float[] medians = new float[maxNumFiles+1];
int ncpu = Runtime.getRuntime().availableProcessors();
ExecutorService pool = Executors.newFixedThreadPool(3); // works with 2 but not 3 threads. hmm...
List<Callable<Void>> jobs = new ArrayList<>();
for (int i = 1; i<=Math.min(validator.allFiles.size(), maxNumFiles); i++) { // i is corpus subset size
final int corpusSubsetSize = i;
Callable<Void> job = () -> {
try {
List<Float> errorRates = new ArrayList<>();
for (int trial = 1; trial<=trials; trial++) { // multiple trials per subset size
Pair<InputDocument, List<InputDocument>> sample = validator.selectSample(documents, corpusSubsetSize);
Triple<Formatter, Float, Float> results = validate(language, sample.b, sample.a, true, false);
// System.out.println(sample.a.fileName+" n="+corpusSubsetSize+": error="+results.c);
// System.out.println("\tcorpus =\n\t\t"+Utils.join(sample.b.iterator(), "\n\t\t"));
errorRates.add(results.c);
}
Collections.sort(errorRates);
int n = errorRates.size();
float median = errorRates.get(n/2);
System.out.println("median "+language.name+" error rate for n="+corpusSubsetSize+" is "+median);
medians[corpusSubsetSize] = median;
}
catch (Throwable t) {
t.printStackTrace(System.err);
}
return null;
};
jobs.add(job);
}
pool.invokeAll(jobs);
pool.shutdown();
boolean terminated = pool.awaitTermination(60, TimeUnit.MINUTES);
System.err.println(language.name+" terminate properly = "+terminated);
return medians;
}
|
#vulnerable code
public static float[] getMedianErrorRates(LangDescriptor language, int maxNumFiles, int trials) throws Exception {
SubsetValidator validator = new SubsetValidator(language.corpusDir, language);
List<InputDocument> documents = load(validator.allFiles, language);
float[] medians = new float[maxNumFiles+1];
for (int i = 1; i<=Math.min(validator.allFiles.size(), maxNumFiles); i++) { // i is corpus subset size
List<Float> errorRates = new ArrayList<>();
for (int trial = 1; trial<=trials; trial++) { // multiple trials per subset size
Pair<InputDocument, List<InputDocument>> sample = validator.selectSample(documents, i);
Triple<Formatter, Float, Float> results = validate(language, sample.b, sample.a, true, false);
System.out.println(sample.a.fileName+" n="+i+": error="+results.c);
// System.out.println("\tcorpus =\n\t\t"+Utils.join(sample.b.iterator(), "\n\t\t"));
errorRates.add(results.c);
}
Collections.sort(errorRates);
int n = errorRates.size();
float min = errorRates.get(0);
float quart = errorRates.get((int)(0.27*n));
float median = errorRates.get(n/2);
float quart3 = errorRates.get((int)(0.75*n));
float max = errorRates.get(n-1);
System.out.println("median error rate for n="+i+" is "+median);
medians[i] = median;
}
return medians;
}
#location 9
#vulnerability type NULL_DEREFERENCE
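Note: besides indexing medians by corpusSubsetSize, the fixed getMedianErrorRates fans the per-size trials out as Callables on a fixed thread pool and waits for all of them. A compact, generic sketch of that submit-and-await pattern (hypothetical work in place of the formatter's validation logic):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

class ParallelTrialsSketch {
    public static void main(String[] args) throws InterruptedException {
        double[] results = new double[11];
        ExecutorService pool = Executors.newFixedThreadPool(3);
        List<Callable<Void>> jobs = new ArrayList<>();
        for (int size = 1; size <= 10; size++) {
            final int n = size;                    // capture the loop variable for the lambda
            jobs.add(() -> {
                results[n] = n * 0.1;              // stand-in for "run trials, take the median"
                return null;
            });
        }
        pool.invokeAll(jobs);                       // blocks until every job has completed
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES);
        System.out.println(results[5]);
    }
}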
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public PageResult<TokenVo> listTokens(Map<String, Object> params, String clientId) {
Integer page = MapUtils.getInteger(params, "page");
Integer limit = MapUtils.getInteger(params, "limit");
int[] startEnds = PageUtil.transToStartEnd(page, limit);
//根据请求参数生成redis的key
String redisKey = getRedisKey(params, clientId);
long size = redisRepository.length(redisKey);
List<TokenVo> result = new ArrayList<>(limit);
RedisSerializer<Object> valueSerializer = RedisSerializer.java();
//查询token集合
//redisRepository.getRedisTemplate().e
List<Object> tokenObjs = redisRepository.getList(redisKey, startEnds[0], startEnds[1]-1, valueSerializer);
if (tokenObjs != null) {
for (Object obj : tokenObjs) {
DefaultOAuth2AccessToken accessToken = (DefaultOAuth2AccessToken)obj;
//构造token对象
TokenVo tokenVo = new TokenVo();
tokenVo.setTokenValue(accessToken.getValue());
tokenVo.setExpiration(accessToken.getExpiration());
//获取用户信息
Object authObj = redisRepository.get(SecurityConstants.REDIS_TOKEN_AUTH + accessToken.getValue(), valueSerializer);
OAuth2Authentication authentication = (OAuth2Authentication)authObj;
if (authentication != null) {
OAuth2Request request = authentication.getOAuth2Request();
tokenVo.setUsername(authentication.getName());
tokenVo.setClientId(request.getClientId());
tokenVo.setGrantType(request.getGrantType());
}
result.add(tokenVo);
}
}
return PageResult.<TokenVo>builder().data(result).code(0).count(size).build();
}
|
#vulnerable code
@Override
public PageResult<TokenVo> listTokens(Map<String, Object> params, String clientId) {
Integer page = MapUtils.getInteger(params, "page");
Integer limit = MapUtils.getInteger(params, "limit");
int[] startEnds = PageUtil.transToStartEnd(page, limit);
//根据请求参数生成redis的key
String redisKey = getRedisKey(params, clientId);
long size = redisRepository.length(redisKey);
List<TokenVo> result = new ArrayList<>(limit);
//查询token集合
List<Object> tokenObjs = redisRepository.getList(redisKey, startEnds[0], startEnds[1]-1);
if (tokenObjs != null) {
for (Object obj : tokenObjs) {
DefaultOAuth2AccessToken accessToken = (DefaultOAuth2AccessToken)obj;
//构造token对象
TokenVo tokenVo = new TokenVo();
tokenVo.setTokenValue(accessToken.getValue());
tokenVo.setExpiration(accessToken.getExpiration());
//获取用户信息
Object authObj = redisRepository.get(SecurityConstants.REDIS_TOKEN_AUTH + accessToken.getValue());
OAuth2Authentication authentication = (OAuth2Authentication)authObj;
if (authentication != null) {
OAuth2Request request = authentication.getOAuth2Request();
tokenVo.setUsername(authentication.getName());
tokenVo.setClientId(request.getClientId());
tokenVo.setGrantType(request.getGrantType());
}
result.add(tokenVo);
}
}
return PageResult.<TokenVo>builder().data(result).code(0).count(size).build();
}
#location 8
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected void assertSuccessfulAuthentication(String providerId) {
this.driver.navigate().to("http://localhost:8081/test-app/");
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8081/auth/realms/realm-with-broker/protocol/openid-connect/login"));
// choose the identity provider
this.loginPage.clickSocial(providerId);
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8082/auth/"));
// log in to identity provider
this.loginPage.login("test-user", "password");
doAfterProviderAuthentication(providerId);
doUpdateProfile(providerId);
// authenticated and redirected to app
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8081/test-app/"));
assertNotNull(retrieveSessionStatus());
doAssertFederatedUser(providerId);
driver.navigate().to("http://localhost:8081/test-app/logout");
driver.navigate().to("http://localhost:8081/test-app/");
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8081/auth/realms/realm-with-broker/protocol/openid-connect/login"));
}
|
#vulnerable code
protected void assertSuccessfulAuthentication(String providerId) {
this.driver.navigate().to("http://localhost:8081/test-app/");
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8081/auth/realms/realm-with-broker/protocol/openid-connect/login"));
// choose the identity provider
this.loginPage.clickSocial(providerId);
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8082/auth/realms/realm-with-saml-identity-provider/protocol/saml"));
// log in to identity provider
this.loginPage.login("saml.user", "password");
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8081/auth/broker/realm-with-broker/" + providerId));
// update profile
this.updateProfilePage.assertCurrent();
String userEmail = "[email protected]";
String userFirstName = "New first";
String userLastName = "New last";
this.updateProfilePage.update(userFirstName, userLastName, userEmail);
// authenticated and redirected to app
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8081/test-app/"));
KeycloakSession samlServerSession = brokerServerRule.startSession();
RealmModel brokerRealm = samlServerSession.realms().getRealm("realm-with-broker");
UserModel federatedUser = samlServerSession.users().getUserByEmail(userEmail, brokerRealm);
// user created
assertNotNull(federatedUser);
assertEquals(userFirstName, federatedUser.getFirstName());
assertEquals(userLastName, federatedUser.getLastName());
driver.navigate().to("http://localhost:8081/test-app/logout");
driver.navigate().to("http://localhost:8081/test-app/");
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8081/auth/realms/realm-with-broker/protocol/openid-connect/login"));
// choose the identity provider
this.loginPage.clickSocial(providerId);
// already authenticated in saml idp and redirected to app
assertTrue(this.driver.getCurrentUrl().startsWith("http://localhost:8081/test-app/"));
}
#location 35
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Path("{username}/session-stats")
@GET
@NoCache
@Produces(MediaType.APPLICATION_JSON)
public Map<String, UserStats> getSessionStats(final @PathParam("username") String username) {
logger.info("session-stats");
auth.requireView();
UserModel user = realm.getUser(username);
if (user == null) {
throw new NotFoundException("User not found");
}
Map<String, UserStats> stats = new HashMap<String, UserStats>();
for (ApplicationModel applicationModel : realm.getApplications()) {
if (applicationModel.getManagementUrl() == null) continue;
UserStats appStats = new ResourceAdminManager().getUserStats(realm, applicationModel, user);
if (appStats == null) continue;
if (appStats.isLoggedIn()) stats.put(applicationModel.getName(), appStats);
}
return stats;
}
|
#vulnerable code
@Path("{username}/session-stats")
@GET
@NoCache
@Produces(MediaType.APPLICATION_JSON)
public Map<String, UserStats> getSessionStats(final @PathParam("username") String username) {
logger.info("session-stats");
auth.requireView();
UserModel user = realm.getUser(username);
if (user == null) {
throw new NotFoundException("User not found");
}
Map<String, UserStats> stats = new HashMap<String, UserStats>();
for (ApplicationModel applicationModel : realm.getApplications()) {
if (applicationModel.getManagementUrl() == null) continue;
UserStats appStats = new ResourceAdminManager().getUserStats(realm, applicationModel, user);
if (appStats.isLoggedIn()) stats.put(applicationModel.getName(), appStats);
}
return stats;
}
#location 16
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void verifyAccess(AccessToken token, RealmModel realm, ClientModel client, UserModel user) throws OAuthErrorException {
ApplicationModel clientApp = (client instanceof ApplicationModel) ? (ApplicationModel)client : null;
if (token.getRealmAccess() != null) {
for (String roleName : token.getRealmAccess().getRoles()) {
RoleModel role = realm.getRole(roleName);
if (role == null) {
throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid realm role " + roleName);
}
if (!user.hasRole(role)) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "User no long has permission for realm role: " + roleName);
}
if (!client.hasScope(role)) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "Client no longer has realm scope: " + roleName);
}
}
}
if (token.getResourceAccess() != null) {
for (Map.Entry<String, AccessToken.Access> entry : token.getResourceAccess().entrySet()) {
ApplicationModel app = realm.getApplicationByName(entry.getKey());
if (app == null) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "Application no longer exists", "Application no longer exists: " + entry.getKey());
}
for (String roleName : entry.getValue().getRoles()) {
RoleModel role = app.getRole(roleName);
if (role == null) {
throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid refresh token", "Unknown application role: " + roleName);
}
if (!user.hasRole(role)) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "User no long has permission for application role " + roleName);
}
if (clientApp != null && !clientApp.equals(app) && !client.hasScope(role)) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "Client no longer has application scope" + roleName);
}
}
}
}
}
|
#vulnerable code
public void verifyAccess(AccessToken token, RealmModel realm, ClientModel client, UserModel user) throws OAuthErrorException {
ApplicationModel clientApp = (client instanceof ApplicationModel) ? (ApplicationModel)client : null;
if (token.getRealmAccess() != null) {
for (String roleName : token.getRealmAccess().getRoles()) {
RoleModel role = realm.getRole(roleName);
if (role == null) {
throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid realm role " + roleName);
}
if (!user.hasRole(role)) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "User no long has permission for realm role: " + roleName);
}
if (!client.hasScope(role)) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "Client no longer has realm scope: " + roleName);
}
}
}
if (token.getResourceAccess() != null) {
for (Map.Entry<String, AccessToken.Access> entry : token.getResourceAccess().entrySet()) {
ApplicationModel app = realm.getApplicationByName(entry.getKey());
if (app == null) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "Application no longer exists", "Application no longer exists: " + app.getName());
}
for (String roleName : entry.getValue().getRoles()) {
RoleModel role = app.getRole(roleName);
if (role == null) {
throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid refresh token", "Unknown application role: " + roleName);
}
if (!user.hasRole(role)) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "User no long has permission for application role " + roleName);
}
if (clientApp != null && !clientApp.equals(app) && !client.hasScope(role)) {
throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "Client no longer has application scope" + roleName);
}
}
}
}
}
#location 23
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static String getPemFromKey(Key key) {
StringWriter writer = new StringWriter();
PEMWriter pemWriter = new PEMWriter(writer);
try {
pemWriter.writeObject(key);
pemWriter.flush();
pemWriter.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
String s = writer.toString();
return PemUtils.removeBeginEnd(s);
}
|
#vulnerable code
public static String getPemFromKey(Key key) {
StringWriter writer = new StringWriter();
PEMWriter pemWriter = new PEMWriter(writer);
try {
pemWriter.writeObject(key);
pemWriter.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
String s = writer.toString();
return PemUtils.removeBeginEnd(s);
}
#location 6
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static String getPemFromCertificate(X509Certificate certificate) {
StringWriter writer = new StringWriter();
PEMWriter pemWriter = new PEMWriter(writer);
try {
pemWriter.writeObject(certificate);
pemWriter.flush();
pemWriter.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
String s = writer.toString();
return PemUtils.removeBeginEnd(s);
}
|
#vulnerable code
public static String getPemFromCertificate(X509Certificate certificate) {
StringWriter writer = new StringWriter();
PEMWriter pemWriter = new PEMWriter(writer);
try {
pemWriter.writeObject(certificate);
pemWriter.flush();
} catch (IOException e) {
throw new RuntimeException(e);
}
String s = writer.toString();
return PemUtils.removeBeginEnd(s);
}
#location 6
#vulnerability type RESOURCE_LEAK
|
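The two PEM helpers above fix the leak by closing the writer at the end of the try block. An equivalent shape uses try-with-resources, which also closes the writer when writeObject or flush throws; a minimal sketch, assuming the same Bouncy Castle PEMWriter and the project's PemUtils helper shown above:

public static String getPemFromKey(Key key) {
    // assumes org.bouncycastle.openssl.PEMWriter and java.io.StringWriter, as in the snippets above
    StringWriter writer = new StringWriter();
    // try-with-resources closes the PEMWriter on both the success and the error path
    try (PEMWriter pemWriter = new PEMWriter(writer)) {
        pemWriter.writeObject(key);
        pemWriter.flush();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return PemUtils.removeBeginEnd(writer.toString());
}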
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@DelayedWrite
public short set(int index, short value) {
boolean success = false;
int divIndex = index >> 1;
boolean isZero = (index & 0x1) == 0;
short one;
short zero;
short old = 0;
while (!success) {
int packed = live.get(divIndex);
if (isZero) {
old = unpackZero(packed);
one = unpackOne(packed);
zero = value;
} else {
old = unpackOne(packed);
one = value;
zero = unpackZero(packed);
}
success = live.compareAndSet(divIndex, packed, pack(zero, one));
}
markDirty(index);
return old;
}
|
#vulnerable code
@DelayedWrite
public short set(int index, short value) {
synchronized (live) {
live[index] = value;
}
int localDirtyIndex = dirtyIndex.getAndIncrement();
if (localDirtyIndex < dirtyArray.length) {
dirtyArray[localDirtyIndex] = index;
}
return snapshot[index];
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void copySnapshot() {
int length = dirtyIndex.get();
if (length <= dirtySize) {
for (int i = 0; i < length; i++) {
int index = dirtyArray.get(i);
this.snapshot[index] = getLive(i);
}
} else {
for (int i = 0; i < snapshot.length; i++) {
this.snapshot[i] = getLive(i);
}
}
}
|
#vulnerable code
@Override
public void copySnapshot() {
int length = dirtyIndex.get();
if (length <= dirtyArray.length) {
for (int i = 0; i < length; i++) {
int index = dirtyArray[i];
this.snapshot[index] = live[index];
}
} else {
for (int i = 0; i < live.length; i++) {
this.snapshot[i] = live[i];
}
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@DelayedWrite
public short set(int index, short value) {
boolean success = false;
int divIndex = index >> 1;
boolean isZero = (index & 0x1) == 0;
short one;
short zero;
short old = 0;
while (!success) {
int packed = live.get(divIndex);
if (isZero) {
old = unpackZero(packed);
one = unpackOne(packed);
zero = value;
} else {
old = unpackOne(packed);
one = value;
zero = unpackZero(packed);
}
success = live.compareAndSet(divIndex, packed, pack(zero, one));
}
markDirty(index);
return old;
}
|
#vulnerable code
@DelayedWrite
public short set(int index, short value) {
synchronized (live) {
live[index] = value;
}
int localDirtyIndex = dirtyIndex.getAndIncrement();
if (localDirtyIndex < dirtyArray.length) {
dirtyArray[localDirtyIndex] = index;
}
return snapshot[index];
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION
|
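The fixed set above relies on pack/unpack helpers, a bounded dirty list, and a markDirty method that are not reproduced in these snippets. A plausible sketch of that packing scheme (two shorts per int, consistent with the AtomicIntegerArray-style live and dirtyArray fields implied by the fixed copySnapshot earlier); this is an assumption for illustration, not the project's verified code:

// Hypothetical helpers matching the calls made by the fixed set(int, short).
private static int pack(short zero, short one) {
    // the even-indexed value sits in the low 16 bits, the odd-indexed value in the high 16 bits
    return (zero & 0xFFFF) | ((one & 0xFFFF) << 16);
}

private static short unpackZero(int packed) {
    return (short) packed;
}

private static short unpackOne(int packed) {
    return (short) (packed >>> 16);
}

private void markDirty(int index) {
    int localDirtyIndex = dirtyIndex.getAndIncrement();
    // record the dirty position only while the bounded dirty list still has room
    if (localDirtyIndex < dirtySize) {
        dirtyArray.set(localDirtyIndex, index);
    }
}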
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void whenInputIsValidKeePassXmlShouldParseFileAndReturnCorrectGroups() throws FileNotFoundException {
KeePassFile keePassFile = parseKeePassXml();
List<Group> groups = keePassFile.getTopGroups();
Assert.assertNotNull(groups);
Assert.assertEquals(6, groups.size());
Assert.assertEquals("General", groups.get(0).getName());
Assert.assertEquals("FqvMJ8yjlUSAEt9OmNSj2A==", groups.get(0).getUuid());
Assert.assertEquals("Windows", groups.get(1).getName());
Assert.assertEquals("rXt7D+EM/0qW1rgPB4g5nw==", groups.get(1).getUuid());
Assert.assertEquals("Network", groups.get(2).getName());
Assert.assertEquals("DwdAaKn4tEyXFlU56/2UBQ==", groups.get(2).getUuid());
Assert.assertEquals("Internet", groups.get(3).getName());
Assert.assertEquals("COgUrPt5P0676DeyZn/auQ==", groups.get(3).getUuid());
Assert.assertEquals("eMail", groups.get(4).getName());
Assert.assertEquals("/xWfOfnC6ki76sNhrZR7rw==", groups.get(4).getUuid());
Assert.assertEquals("Homebanking", groups.get(5).getName());
Assert.assertEquals("Rdjt21Jla0+E5Q9ElJHw1g==", groups.get(5).getUuid());
}
|
#vulnerable code
@Test
public void whenInputIsValidKeePassXmlShouldParseFileAndReturnCorrectGroups() throws FileNotFoundException {
FileInputStream fileInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted.xml");
KeePassFile keePassFile = new XmlParser().parse(fileInputStream, Salsa20.createInstance(protectedStreamKey));
List<Group> groups = keePassFile.getTopGroups();
Assert.assertNotNull(groups);
Assert.assertEquals(6, groups.size());
Assert.assertEquals("General", groups.get(0).getName());
Assert.assertEquals("FqvMJ8yjlUSAEt9OmNSj2A==", groups.get(0).getUuid());
Assert.assertEquals("Windows", groups.get(1).getName());
Assert.assertEquals("rXt7D+EM/0qW1rgPB4g5nw==", groups.get(1).getUuid());
Assert.assertEquals("Network", groups.get(2).getName());
Assert.assertEquals("DwdAaKn4tEyXFlU56/2UBQ==", groups.get(2).getUuid());
Assert.assertEquals("Internet", groups.get(3).getName());
Assert.assertEquals("COgUrPt5P0676DeyZn/auQ==", groups.get(3).getUuid());
Assert.assertEquals("eMail", groups.get(4).getName());
Assert.assertEquals("/xWfOfnC6ki76sNhrZR7rw==", groups.get(4).getUuid());
Assert.assertEquals("Homebanking", groups.get(5).getName());
Assert.assertEquals("Rdjt21Jla0+E5Q9ElJHw1g==", groups.get(5).getUuid());
}
#location 27
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void whenInputIsValidKeePassXmlShouldParseFileAndReturnCorrectGroups() throws FileNotFoundException {
KeePassFile keePassFile = parseKeePassXml();
List<Group> groups = keePassFile.getTopGroups();
Assert.assertNotNull(groups);
Assert.assertEquals(6, groups.size());
Assert.assertEquals("General", groups.get(0).getName());
Assert.assertEquals("FqvMJ8yjlUSAEt9OmNSj2A==", groups.get(0).getUuid());
Assert.assertEquals("Windows", groups.get(1).getName());
Assert.assertEquals("rXt7D+EM/0qW1rgPB4g5nw==", groups.get(1).getUuid());
Assert.assertEquals("Network", groups.get(2).getName());
Assert.assertEquals("DwdAaKn4tEyXFlU56/2UBQ==", groups.get(2).getUuid());
Assert.assertEquals("Internet", groups.get(3).getName());
Assert.assertEquals("COgUrPt5P0676DeyZn/auQ==", groups.get(3).getUuid());
Assert.assertEquals("eMail", groups.get(4).getName());
Assert.assertEquals("/xWfOfnC6ki76sNhrZR7rw==", groups.get(4).getUuid());
Assert.assertEquals("Homebanking", groups.get(5).getName());
Assert.assertEquals("Rdjt21Jla0+E5Q9ElJHw1g==", groups.get(5).getUuid());
}
|
#vulnerable code
@Test
public void whenInputIsValidKeePassXmlShouldParseFileAndReturnCorrectGroups() throws FileNotFoundException {
FileInputStream fileInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted.xml");
KeePassFile keePassFile = new XmlParser().parse(fileInputStream, Salsa20.createInstance(protectedStreamKey));
List<Group> groups = keePassFile.getTopGroups();
Assert.assertNotNull(groups);
Assert.assertEquals(6, groups.size());
Assert.assertEquals("General", groups.get(0).getName());
Assert.assertEquals("FqvMJ8yjlUSAEt9OmNSj2A==", groups.get(0).getUuid());
Assert.assertEquals("Windows", groups.get(1).getName());
Assert.assertEquals("rXt7D+EM/0qW1rgPB4g5nw==", groups.get(1).getUuid());
Assert.assertEquals("Network", groups.get(2).getName());
Assert.assertEquals("DwdAaKn4tEyXFlU56/2UBQ==", groups.get(2).getUuid());
Assert.assertEquals("Internet", groups.get(3).getName());
Assert.assertEquals("COgUrPt5P0676DeyZn/auQ==", groups.get(3).getUuid());
Assert.assertEquals("eMail", groups.get(4).getName());
Assert.assertEquals("/xWfOfnC6ki76sNhrZR7rw==", groups.get(4).getUuid());
Assert.assertEquals("Homebanking", groups.get(5).getName());
Assert.assertEquals("Rdjt21Jla0+E5Q9ElJHw1g==", groups.get(5).getUuid());
}
#location 9
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public boolean isTitleProtected() {
return getPropertyByName(TITLE).isProtected();
}
|
#vulnerable code
public boolean isTitleProtected() {
return getPropertyByName(TITLE).isProtected();
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
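The fixed and vulnerable bodies of isTitleProtected are identical in this entry, so the guard for the reported null dereference presumably lives elsewhere, for example inside getPropertyByName. A defensive variant that would avoid the dereference directly, assuming getPropertyByName returns a Property with an isProtected() accessor; an illustration only, not the project's recorded patch:

public boolean isTitleProtected() {
    Property title = getPropertyByName(TITLE);
    // entries without a TITLE property are simply treated as unprotected
    return title != null && title.isProtected();
}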
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private byte[] processDatabaseEncryption(boolean encrypt, byte[] database, KeePassHeader header, byte[] aesKey)
throws IOException {
byte[] metaData = new byte[KeePassHeader.VERSION_SIGNATURE_LENGTH + header.getHeaderSize()];
SafeInputStream inputStream = new SafeInputStream(new BufferedInputStream(new ByteArrayInputStream(database)));
inputStream.readSafe(metaData);
byte[] payload = StreamUtils.toByteArray(inputStream);
byte[] processedPayload;
if (encrypt) {
processedPayload = Aes.encrypt(aesKey, header.getEncryptionIV(), payload);
} else {
processedPayload = Aes.decrypt(aesKey, header.getEncryptionIV(), payload);
}
ByteArrayOutputStream output = new ByteArrayOutputStream();
output.write(metaData);
output.write(processedPayload);
return output.toByteArray();
}
|
#vulnerable code
private byte[] processDatabaseEncryption(boolean encrypt, byte[] database, KeePassHeader header, byte[] aesKey)
throws IOException {
byte[] metaData = new byte[KeePassHeader.VERSION_SIGNATURE_LENGTH + header.getHeaderSize()];
BufferedInputStream bufferedInputStream = new BufferedInputStream(new ByteArrayInputStream(database));
bufferedInputStream.read(metaData);
byte[] payload = StreamUtils.toByteArray(bufferedInputStream);
byte[] processedPayload;
if (encrypt) {
processedPayload = Aes.encrypt(aesKey, header.getEncryptionIV(), payload);
} else {
processedPayload = Aes.decrypt(aesKey, header.getEncryptionIV(), payload);
}
ByteArrayOutputStream output = new ByteArrayOutputStream();
output.write(metaData);
output.write(processedPayload);
return output.toByteArray();
}
#location 7
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private byte[] processDatabaseEncryption(boolean encrypt, byte[] database, KeePassHeader header, byte[] aesKey)
throws IOException {
byte[] metaData = new byte[KeePassHeader.VERSION_SIGNATURE_LENGTH + header.getHeaderSize()];
SafeInputStream inputStream = new SafeInputStream(new BufferedInputStream(new ByteArrayInputStream(database)));
inputStream.readSafe(metaData);
byte[] payload = StreamUtils.toByteArray(inputStream);
byte[] processedPayload;
if (encrypt) {
processedPayload = Aes.encrypt(aesKey, header.getEncryptionIV(), payload);
} else {
processedPayload = Aes.decrypt(aesKey, header.getEncryptionIV(), payload);
}
ByteArrayOutputStream output = new ByteArrayOutputStream();
output.write(metaData);
output.write(processedPayload);
return output.toByteArray();
}
|
#vulnerable code
private byte[] processDatabaseEncryption(boolean encrypt, byte[] database, KeePassHeader header, byte[] aesKey)
throws IOException {
byte[] metaData = new byte[KeePassHeader.VERSION_SIGNATURE_LENGTH + header.getHeaderSize()];
BufferedInputStream bufferedInputStream = new BufferedInputStream(new ByteArrayInputStream(database));
bufferedInputStream.read(metaData);
byte[] payload = StreamUtils.toByteArray(bufferedInputStream);
byte[] processedPayload;
if (encrypt) {
processedPayload = Aes.encrypt(aesKey, header.getEncryptionIV(), payload);
} else {
processedPayload = Aes.decrypt(aesKey, header.getEncryptionIV(), payload);
}
ByteArrayOutputStream output = new ByteArrayOutputStream();
output.write(metaData);
output.write(processedPayload);
return output.toByteArray();
}
#location 19
#vulnerability type RESOURCE_LEAK
|
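Both fixed variants of processDatabaseEncryption replace the plain BufferedInputStream.read with SafeInputStream.readSafe, which, unlike read, is expected to fill the whole buffer or fail rather than silently returning fewer bytes. A plausible sketch of such a method on an InputStream wrapper, offered as an assumption rather than the project's actual implementation:

public int readSafe(byte[] buffer) throws IOException {
    int total = 0;
    // keep reading until the buffer is full, so a short read never goes unnoticed
    while (total < buffer.length) {
        int read = read(buffer, total, buffer.length - total);
        if (read < 0) {
            throw new IOException("Unexpected end of stream after " + total + " bytes");
        }
        total += read;
    }
    return total;
}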
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void whenWritingKeePassFileShouldBeAbleToReadItAgain() throws IOException {
// Read decrypted and write again
FileInputStream fileInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted.xml");
KeePassDatabaseXmlParser parser = new KeePassDatabaseXmlParser();
KeePassFile keePassFile = parser.fromXml(fileInputStream);
ByteArrayOutputStream outputStream = parser.toXml(keePassFile);
OutputStream fileOutputStream = new FileOutputStream("target/test-classes/testDatabase_decrypted2.xml");
outputStream.writeTo(fileOutputStream);
// Read written file
FileInputStream writtenInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted2.xml");
KeePassFile writtenKeePassFile = parser.fromXml(writtenInputStream);
new ProtectedValueProcessor().processProtectedValues(new DecryptionStrategy(Salsa20.createInstance(protectedStreamKey)), writtenKeePassFile);
Assert.assertEquals("Password", writtenKeePassFile.getEntryByTitle("Sample Entry").getPassword());
}
|
#vulnerable code
@Test
public void whenWritingKeePassFileShouldBeAbleToReadItAgain() throws IOException {
// Read decrypted and write again
FileInputStream fileInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted.xml");
KeePassDatabaseXmlParser parser = new KeePassDatabaseXmlParser();
KeePassFile keePassFile = parser.fromXml(fileInputStream, Salsa20.createInstance(protectedStreamKey));
new ProtectedValueProcessor().processProtectedValues(new DecryptionStrategy(Salsa20.createInstance(protectedStreamKey)), keePassFile);
ByteArrayOutputStream outputStream = parser.toXml(keePassFile, Salsa20.createInstance(protectedStreamKey));
OutputStream fileOutputStream = new FileOutputStream("target/test-classes/testDatabase_decrypted2.xml");
outputStream.writeTo(fileOutputStream);
// Read written file
FileInputStream writtenInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted2.xml");
KeePassFile writtenKeePassFile = parser.fromXml(writtenInputStream, Salsa20.createInstance(protectedStreamKey));
new ProtectedValueProcessor().processProtectedValues(new DecryptionStrategy(Salsa20.createInstance(protectedStreamKey)), writtenKeePassFile);
Assert.assertEquals("Password", writtenKeePassFile.getEntryByTitle("Sample Entry").getPassword());
}
#location 19
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public KeePassFile openDatabase(String password) {
try {
byte[] passwordBytes = password.getBytes("UTF-8");
byte[] hashedPassword = Sha256.hash(passwordBytes);
return decryptAndParseDatabase(hashedPassword);
} catch (UnsupportedEncodingException e) {
throw new UnsupportedOperationException("The encoding UTF-8 is not supported");
}
}
|
#vulnerable code
public KeePassFile openDatabase(String password) {
try {
byte[] aesDecryptedDbFile = decrypter.decryptDatabase(password, keepassHeader, keepassFile);
byte[] startBytes = new byte[32];
ByteArrayInputStream decryptedStream = new ByteArrayInputStream(aesDecryptedDbFile);
decryptedStream.read(startBytes);
// compare startBytes
if(!Arrays.equals(keepassHeader.getStreamStartBytes(), startBytes)) {
throw new KeepassDatabaseUnreadable("The keepass database file seems to be corrupt or cannot be decrypted.");
}
HashedBlockInputStream hashedBlockInputStream = new HashedBlockInputStream(decryptedStream);
byte[] hashedBlockBytes = StreamUtils.toByteArray(hashedBlockInputStream);
byte[] decompressed = hashedBlockBytes;
// unzip if necessary
if(keepassHeader.getCompression().equals(CompressionAlgorithm.Gzip)) {
GZIPInputStream gzipInputStream = new GZIPInputStream(new ByteArrayInputStream(hashedBlockBytes));
decompressed = StreamUtils.toByteArray(gzipInputStream);
}
ProtectedStringCrypto protectedStringCrypto;
if(keepassHeader.getCrsAlgorithm().equals(CrsAlgorithm.Salsa20)) {
protectedStringCrypto = Salsa20.createInstance(keepassHeader.getProtectedStreamKey());
}
else {
throw new UnsupportedOperationException("Only Salsa20 is supported as CrsAlgorithm at the moment!");
}
return xmlParser.parse(new ByteArrayInputStream(decompressed), protectedStringCrypto);
} catch (IOException e) {
throw new RuntimeException("Could not open database file", e);
}
}
#location 22
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private byte[] unHashBlockStream(SafeInputStream decryptedStream) throws IOException {
HashedBlockInputStream hashedBlockInputStream = new HashedBlockInputStream(decryptedStream);
return StreamUtils.toByteArray(hashedBlockInputStream);
}
|
#vulnerable code
private byte[] unHashBlockStream(SafeInputStream decryptedStream) throws IOException {
HashedBlockInputStream hashedBlockInputStream = new HashedBlockInputStream(decryptedStream);
byte[] hashedBlockBytes = StreamUtils.toByteArray(hashedBlockInputStream);
return hashedBlockBytes;
}
#location 3
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void whenInputIsKeyFileShouldParseFileAndReturnCorrectData() throws IOException {
FileInputStream fileInputStream = new FileInputStream("target/test-classes/DatabaseWithKeyfile.key");
byte[] keyFileContent = StreamUtils.toByteArray(fileInputStream);
KeyFile keyFile = new KeyFileXmlParser().fromXml(keyFileContent);
Assert.assertEquals("RP+rYNZL4lrGtDMBPzOuctlh3NAutSG5KGsT38C+qPQ=", keyFile.getKey().getData());
}
|
#vulnerable code
@Test
public void whenInputIsKeyFileShouldParseFileAndReturnCorrectData() throws FileNotFoundException {
FileInputStream fileInputStream = new FileInputStream("target/test-classes/DatabaseWithKeyfile.key");
KeyFile keyFile = new KeyFileXmlParser().fromXml(fileInputStream);
Assert.assertEquals("RP+rYNZL4lrGtDMBPzOuctlh3NAutSG5KGsT38C+qPQ=", keyFile.getKey().getData());
}
#location 4
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void whenInputIsValidKeePassXmlShouldParseFileAndReturnCorrectMetadata() throws FileNotFoundException {
KeePassFile keePassFile = parseKeePassXml();
Assert.assertEquals("KeePass", keePassFile.getMeta().getGenerator());
Assert.assertEquals("TestDatabase", keePassFile.getMeta().getDatabaseName());
Assert.assertEquals("Just a sample db", keePassFile.getMeta().getDatabaseDescription());
Assert.assertEquals("2014-11-22 18:59:39", dateFormatter.format(keePassFile.getMeta().getDatabaseNameChanged().getTime()));
Assert.assertEquals("2014-11-22 18:59:39", dateFormatter.format(keePassFile.getMeta().getDatabaseDescriptionChanged().getTime()));
Assert.assertEquals(365, keePassFile.getMeta().getMaintenanceHistoryDays());
Assert.assertEquals(true, keePassFile.getMeta().getRecycleBinEnabled());
Assert.assertEquals("AAAAAAAAAAAAAAAAAAAAAA==", keePassFile.getMeta().getRecycleBinUuid());
Assert.assertEquals("2014-11-22 18:58:56", dateFormatter.format(keePassFile.getMeta().getRecycleBinChanged().getTime()));
Assert.assertEquals(10, keePassFile.getMeta().getHistoryMaxItems());
Assert.assertEquals(6291456, keePassFile.getMeta().getHistoryMaxSize());
}
|
#vulnerable code
@Test
public void whenInputIsValidKeePassXmlShouldParseFileAndReturnCorrectMetadata() throws FileNotFoundException {
FileInputStream fileInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted.xml");
KeePassFile keePassFile = new XmlParser().parse(fileInputStream, Salsa20.createInstance(protectedStreamKey));
Assert.assertEquals("KeePass", keePassFile.getMeta().getGenerator());
Assert.assertEquals("TestDatabase", keePassFile.getMeta().getDatabaseName());
Assert.assertEquals("Just a sample db", keePassFile.getMeta().getDatabaseDescription());
Assert.assertEquals("2014-11-22 18:59:39", dateFormatter.format(keePassFile.getMeta().getDatabaseNameChanged().getTime()));
Assert.assertEquals("2014-11-22 18:59:39", dateFormatter.format(keePassFile.getMeta().getDatabaseDescriptionChanged().getTime()));
Assert.assertEquals(365, keePassFile.getMeta().getMaintenanceHistoryDays());
Assert.assertEquals(true, keePassFile.getMeta().getRecycleBinEnabled());
Assert.assertEquals("AAAAAAAAAAAAAAAAAAAAAA==", keePassFile.getMeta().getRecycleBinUuid());
Assert.assertEquals("2014-11-22 18:58:56", dateFormatter.format(keePassFile.getMeta().getRecycleBinChanged().getTime()));
Assert.assertEquals(10, keePassFile.getMeta().getHistoryMaxItems());
Assert.assertEquals(6291456, keePassFile.getMeta().getHistoryMaxSize());
}
#location 16
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) {
usage1();
//usage2();
}
|
#vulnerable code
public static void main(String[] args) {
String allExtractRegularUrl = "http://localhost:8080/HtmlExtractorServer/api/all_extract_regular.jsp";
String redisHost = "localhost";
int redisPort = 6379;
HtmlExtractor htmlExtractor = HtmlExtractor.getInstance(allExtractRegularUrl, redisHost, redisPort);
String url = "http://money.163.com/08/1219/16/4THR2TMP002533QK.html";
List<ExtractResult> extractResults = htmlExtractor.extract(url, "gb2312");
int i = 1;
for (ExtractResult extractResult : extractResults) {
System.out.println((i++) + "、网页 " + extractResult.getUrl() + " 的抽取结果");
for(ExtractResultItem extractResultItem : extractResult.getExtractResultItems()){
System.out.print("\t"+extractResultItem.getField()+" = "+extractResultItem.getValue());
}
System.out.println("\tdescription = "+extractResult.getDescription());
System.out.println("\tkeywords = "+extractResult.getKeywords());
}
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) {
        //The three methods below represent three different usage modes; only one can be used at a time
//usage1();
usage2();
//usage3();
}
|
#vulnerable code
public static void main(String[] args) {
        //The three methods below represent three different usage modes; only one can be used at a time
//usage1();
//usage2();
usage3();
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public ValueBlock filter(PositionBlock positions)
{
return MaskedValueBlock.maskBlock(this, positions);
}
|
#vulnerable code
@Override
public ValueBlock filter(PositionBlock positions)
{
// find selected positions
Set<Integer> indexes = new HashSet<>();
for (long position : positions.getPositions()) {
if (range.contains(position)) {
indexes.add((int) (position - range.lowerEndpoint()));
}
}
// if no positions are selected, we are done
if (indexes.isEmpty()) {
return EmptyValueBlock.INSTANCE;
}
// build a buffer containing only the tuples from the selected positions
DynamicSliceOutput sliceOutput = new DynamicSliceOutput(1024);
int currentOffset = 0;
for (int index = 0; index < getCount(); ++index) {
Slice currentPositionToEnd = slice.slice(currentOffset, slice.length() - currentOffset);
int size = tupleInfo.size(currentPositionToEnd);
// only write selected tuples
if (indexes.contains(index)) {
sliceOutput.writeBytes(slice, currentOffset, size);
}
currentOffset += size;
}
// todo what is the start position
return new UncompressedValueBlock(Ranges.closed(0L, (long) indexes.size() - 1), tupleInfo, sliceOutput.slice());
}
#location 35
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private Tuple createTuple(String value)
{
TupleInfo tupleInfo = new TupleInfo(VARIABLE_BINARY);
Tuple tuple = tupleInfo.builder()
.append(Slices.wrappedBuffer(value.getBytes(UTF_8)))
.build();
return tuple;
}
|
#vulnerable code
private Tuple createTuple(String value)
{
byte[] bytes = value.getBytes(UTF_8);
Slice slice = Slices.allocate(bytes.length + SIZE_OF_SHORT);
slice.output()
.appendShort(bytes.length + 2)
.appendBytes(bytes);
return new Tuple(slice, new TupleInfo(VARIABLE_BINARY));
}
#location 7
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testCountAll()
{
List<Tuple> expected = computeExpected("SELECT COUNT(*) FROM orders", FIXED_INT_64);
TupleStream orders = createTupleStream(ordersData, Column.ORDER_ORDERKEY, FIXED_INT_64);
AggregationOperator aggregation = new AggregationOperator(orders, CountAggregation.PROVIDER);
assertEqualsIgnoreOrder(tuples(aggregation), expected);
}
|
#vulnerable code
@Test
public void testCountAll()
{
List<Tuple> expected = computeExpected("SELECT COUNT(*) FROM orders", FIXED_INT_64);
TupleStream orders = createBlockStream(ordersData, Column.ORDER_ORDERKEY, FIXED_INT_64);
AggregationOperator aggregation = new AggregationOperator(orders, CountAggregation.PROVIDER);
assertEqualsIgnoreOrder(tuples(aggregation), expected);
}
#location 9
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testCountAllWithComparison()
{
List<Tuple> expected = computeExpected("SELECT COUNT(*) FROM lineitem WHERE tax < discount", FIXED_INT_64);
TupleStream discount = createTupleStream(lineitemData, Column.LINEITEM_DISCOUNT, DOUBLE);
TupleStream tax = createTupleStream(lineitemData, Column.LINEITEM_TAX, DOUBLE);
ComparisonOperator comparison = new ComparisonOperator(tax, discount, new DoubleLessThanComparison());
AggregationOperator aggregation = new AggregationOperator(comparison, CountAggregation.PROVIDER);
assertEqualsIgnoreOrder(tuples(aggregation), expected);
}
|
#vulnerable code
@Test
public void testCountAllWithComparison()
{
List<Tuple> expected = computeExpected("SELECT COUNT(*) FROM lineitem WHERE tax < discount", FIXED_INT_64);
TupleStream discount = createBlockStream(lineitemData, Column.LINEITEM_DISCOUNT, DOUBLE);
TupleStream tax = createBlockStream(lineitemData, Column.LINEITEM_TAX, DOUBLE);
ComparisonOperator comparison = new ComparisonOperator(tax, discount, new DoubleLessThanComparison());
AggregationOperator aggregation = new AggregationOperator(comparison, CountAggregation.PROVIDER);
assertEqualsIgnoreOrder(tuples(aggregation), expected);
}
#location 12
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void writeTo(UncompressedBlock block,
Class<?> type,
Type genericType,
Annotation[] annotations,
MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders,
OutputStream output)
throws IOException, WebApplicationException
{
Slice slice = block.getSlice();
new OutputStreamSliceOutput(output)
.appendInt(slice.length())
.appendInt(block.getCount())
.appendLong(block.getRange().getStart())
.appendBytes(slice);
}
|
#vulnerable code
@Override
public void writeTo(UncompressedBlock block,
Class<?> type,
Type genericType,
Annotation[] annotations,
MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders,
OutputStream output)
throws IOException, WebApplicationException
{
Slice slice = block.getSlice();
// write header
ByteArraySlice header = Slices.allocate(SIZE_OF_INT + SIZE_OF_INT + SIZE_OF_LONG);
header.output()
.appendInt(slice.length())
.appendInt(block.getCount())
.appendLong(block.getRange().getStart());
output.write(header.getRawArray());
// write slice
slice.getBytes(0, output, slice.length());
}
#location 18
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testSelectWithComparison()
{
List<Tuple> expected = computeExpected("SELECT orderkey FROM lineitem WHERE tax < discount", FIXED_INT_64);
RowSourceBuilder orderKey = createTupleStream(lineitemData, Column.LINEITEM_ORDERKEY, FIXED_INT_64);
TupleStream discount = createTupleStream(lineitemData, Column.LINEITEM_DISCOUNT, DOUBLE);
TupleStream tax = createTupleStream(lineitemData, Column.LINEITEM_TAX, DOUBLE);
ComparisonOperator comparison = new ComparisonOperator(tax, discount, new DoubleLessThanComparison());
FilterOperator result = new FilterOperator(orderKey.getTupleInfo(), orderKey, comparison);
assertEqualsIgnoreOrder(tuples(result), expected);
}
|
#vulnerable code
@Test
public void testSelectWithComparison()
{
List<Tuple> expected = computeExpected("SELECT orderkey FROM lineitem WHERE tax < discount", FIXED_INT_64);
RowSourceBuilder orderKey = createBlockStream(lineitemData, Column.LINEITEM_ORDERKEY, FIXED_INT_64);
TupleStream discount = createBlockStream(lineitemData, Column.LINEITEM_DISCOUNT, DOUBLE);
TupleStream tax = createBlockStream(lineitemData, Column.LINEITEM_TAX, DOUBLE);
ComparisonOperator comparison = new ComparisonOperator(tax, discount, new DoubleLessThanComparison());
FilterOperator result = new FilterOperator(orderKey.getTupleInfo(), orderKey, comparison);
assertEqualsIgnoreOrder(tuples(result), expected);
}
#location 13
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testCreatePipelineAggregatedSharedTask() throws Exception {
FreeStyleProject build1 = jenkins.createFreeStyleProject("build1");
FreeStyleProject build2 = jenkins.createFreeStyleProject("build2");
FreeStyleProject sonar = jenkins.createFreeStyleProject("sonar1");
build1.getPublishersList().add(new BuildTrigger("sonar1", true));
build2.getPublishersList().add(new BuildTrigger("sonar1", true));
build1.save();
build2.save();
jenkins.getInstance().rebuildDependencyGraph();
jenkins.setQuietPeriod(0);
PipelineFactory factory = new PipelineFactory();
final Pipeline pipe1 = factory.extractPipeline("pipe1", build1);
final Pipeline pipe2 = factory.extractPipeline("pipe2", build2);
Pipeline aggregated1 = factory.createPipelineAggregated(pipe1);
Pipeline aggregated2 = factory.createPipelineAggregated(pipe2);
assertNull(aggregated1.getStages().get(0).getVersion());
assertNull(aggregated2.getStages().get(0).getVersion());
assertTrue(aggregated1.getStages().get(0).getTasks().get(0).getStatus().isIdle());
assertTrue(aggregated2.getStages().get(0).getTasks().get(0).getStatus().isIdle());
jenkins.buildAndAssertSuccess(build1);
jenkins.waitUntilNoActivity();
assertNotNull(sonar.getLastBuild());
assertEquals(pipe1.getStages().size(), 2);
assertEquals(pipe2.getStages().size(), 2);
assertNotNull(sonar.getBuild("1"));
aggregated1 = factory.createPipelineAggregated(pipe1);
aggregated2 = factory.createPipelineAggregated(pipe2);
assertEquals("#1", aggregated1.getStages().get(1).getVersion());
assertEquals(jenkins.getInstance().getRootUrl() + "job/sonar1/1/", aggregated1.getStages().get(1).getTasks().get(0).getLink());
assertEquals(true, aggregated2.getStages().get(1).getTasks().get(0).getStatus().isIdle());
assertEquals(jenkins.getInstance().getRootUrl() + "job/sonar1/", aggregated2.getStages().get(1).getTasks().get(0).getLink());
jenkins.buildAndAssertSuccess(build2);
jenkins.waitUntilNoActivity();
aggregated1 = factory.createPipelineAggregated(pipe1);
aggregated2 = factory.createPipelineAggregated(pipe2);
assertEquals("#1", aggregated1.getStages().get(1).getVersion());
assertEquals("#1", aggregated2.getStages().get(1).getVersion());
assertEquals(true, aggregated2.getStages().get(1).getTasks().get(0).getStatus().isSuccess());
assertEquals(jenkins.getInstance().getRootUrl() + "job/sonar1/2/", aggregated2.getStages().get(1).getTasks().get(0).getLink());
jenkins.buildAndAssertSuccess(build1);
jenkins.waitUntilNoActivity();
aggregated1 = factory.createPipelineAggregated(pipe1);
aggregated2 = factory.createPipelineAggregated(pipe2);
assertEquals("#2", aggregated1.getStages().get(1).getVersion());
assertEquals("#1", aggregated2.getStages().get(1).getVersion());
assertEquals(jenkins.getInstance().getRootUrl() + "job/sonar1/3/", aggregated1.getStages().get(1).getTasks().get(0).getLink());
assertEquals(jenkins.getInstance().getRootUrl() + "job/sonar1/2/", aggregated2.getStages().get(1).getTasks().get(0).getLink());
}
|
#vulnerable code
@Test
public void testCreatePipelineAggregatedSharedTask() throws Exception {
FreeStyleProject build1 = jenkins.createFreeStyleProject("build1");
FreeStyleProject build2 = jenkins.createFreeStyleProject("build2");
FreeStyleProject sonar = jenkins.createFreeStyleProject("sonar1");
build1.getPublishersList().add(new BuildTrigger("sonar1", true));
build2.getPublishersList().add(new BuildTrigger("sonar1", true));
build1.save();
build2.save();
jenkins.getInstance().rebuildDependencyGraph();
jenkins.setQuietPeriod(0);
jenkins.buildAndAssertSuccess(build1);
jenkins.waitUntilNoActivity();
assertNotNull(sonar.getLastBuild());
PipelineFactory factory = new PipelineFactory();
final Pipeline pipe1 = factory.extractPipeline("pipe1", build1);
final Pipeline pipe2 = factory.extractPipeline("pipe2", build2);
assertEquals(pipe1.getStages().size(), 2);
assertEquals(pipe2.getStages().size(), 2);
assertNotNull(sonar.getBuild("1"));
Pipeline aggregated1 = factory.createPipelineAggregated(pipe1);
Pipeline aggregated2 = factory.createPipelineAggregated(pipe2);
assertEquals("#1", aggregated1.getStages().get(1).getVersion());
assertEquals(true, aggregated2.getStages().get(1).getStatus().isIdle());
jenkins.buildAndAssertSuccess(build2);
jenkins.waitUntilNoActivity();
Pipeline aggregated3 = factory.createPipelineAggregated(pipe1);
Pipeline aggregated4 = factory.createPipelineAggregated(pipe2);
assertEquals("#1", aggregated3.getStages().get(1).getVersion());
assertEquals("#1", aggregated4.getStages().get(1).getVersion());
}
#location 27
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public Pipeline getPipeline() {
AbstractProject first = Jenkins.getInstance().getItem(firstJob, Jenkins.getInstance(), AbstractProject.class);
AbstractBuild prevBuild = null;
List<Stage> stages = newArrayList();
for (AbstractProject job : getAllDownstreamJobs(first)) {
AbstractBuild build = job.getLastBuild();
Task task;
if (stages.isEmpty() || build != null && build.equals(getDownstreamBuild(job, prevBuild))) {
Status status = build != null? resolveStatus(build): idle();
task = new Task(job.getDisplayName(), status);
prevBuild = build;
} else {
task = new Task(job.getDisplayName(), idle());
prevBuild = null;
}
Stage stage = new Stage(job.getDisplayName(), singletonList(task));
stages.add(stage);
}
return new Pipeline(title, stages);
}
|
#vulnerable code
public Pipeline getPipeline() {
AbstractProject first = Jenkins.getInstance().getItem(firstJob, Jenkins.getInstance(), AbstractProject.class);
AbstractBuild prevBuild = null;
List<Stage> stages = newArrayList();
boolean isFirst = true;
for (AbstractProject job : getAllDownstreamJobs(first)) {
AbstractBuild build = job.getLastBuild();
Task task;
if (isFirst || build.equals(getDownstreamBuild(job, prevBuild))) {
Status status = resolveStatus(build);
if (status == Status.RUNNING) {
task = new Task(job.getDisplayName(), status, (int) Math.round((double) (System.currentTimeMillis() - build.getTimestamp().getTimeInMillis()) / build.getEstimatedDuration() * 100.0));
} else {
task = new Task(job.getDisplayName(), status, 100);
}
prevBuild = build;
} else {
task = new Task(job.getDisplayName(), Status.NOTRUNNED, 0);
prevBuild = null;
}
Stage stage = new Stage(job.getDisplayName(), singletonList(task));
stages.add(stage);
isFirst = false;
}
return new Pipeline(title, stages);
}
#location 10
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public Pipeline createPipelineAggregated(Pipeline pipeline) {
List<Stage> stages = new ArrayList<>();
for (Stage stage : pipeline.getStages()) {
List<Task> tasks = new ArrayList<>();
AbstractBuild firstTask = getJenkinsJob(stage.getTasks().get(0)).getLastBuild();
AbstractBuild versionBuild = getFirstUpstreamBuild(firstTask);
String version = null;
if (versionBuild != null) {
version = versionBuild.getDisplayName();
}
for (Task task : stage.getTasks()) {
AbstractProject job = getJenkinsJob(task);
AbstractBuild currentBuild = match(job.getBuilds(), versionBuild);
if (currentBuild != null) {
tasks.add(new Task(task.getId(), task.getName(), resolveStatus(job, currentBuild), Jenkins.getInstance().getRootUrl() + currentBuild.getUrl(), getTestResult(currentBuild)));
} else {
tasks.add(new Task(task.getId(), task.getName(), StatusFactory.idle(), task.getLink(), null));
}
}
stages.add(new Stage(stage.getName(), tasks, version));
}
//TODO add triggeredBy
return new Pipeline(pipeline.getName(), null, null, stages);
}
|
#vulnerable code
public Pipeline createPipelineAggregated(Pipeline pipeline) {
List<Stage> stages = new ArrayList<>();
for (Stage stage : pipeline.getStages()) {
List<Task> tasks = new ArrayList<>();
AbstractBuild firstTask = getJenkinsJob(stage.getTasks().get(0)).getLastBuild();
AbstractBuild versionBuild = getFirstUpstreamBuild(firstTask);
String version = versionBuild.getDisplayName();
for (Task task : stage.getTasks()) {
AbstractProject job = getJenkinsJob(task);
AbstractBuild currentBuild = match(job.getBuilds(), versionBuild);
if (currentBuild != null) {
tasks.add(new Task(task.getId(), task.getName(), resolveStatus(job, currentBuild), Jenkins.getInstance().getRootUrl() + currentBuild.getUrl(), getTestResult(currentBuild)));
} else {
tasks.add(new Task(task.getId(), task.getName(), StatusFactory.idle(), task.getLink(), null));
}
}
stages.add(new Stage(stage.getName(), tasks, version));
}
//TODO add triggeredBy
return new Pipeline(pipeline.getName(), null, null, stages);
}
#location 10
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Before
public void setUp() throws Exception {
String content = File2String.read("appmsg-file.xml");
WechatMessage m = new WechatMessage();
m.Content = content;
handler = new AppMsgXmlHandler(m);
}
|
#vulnerable code
@Before
public void setUp() throws Exception {
String content = File2String.read("appmsg-file.xml");
handler = new AppMsgXmlHandler(content);
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testGetRecents() {
AppMsgInfo info = handler.decode();
Assert.assertEquals("南京abc.xlsx", info.title);
System.out.println(info);
WechatMessage m = new WechatMessage();
m.Content = File2String.read("appmsg-publisher.xml");
handler = new AppMsgXmlHandler(m);
info = handler.decode();
Assert.assertEquals("谷歌开发者", info.appName);
System.out.println(info);
}
|
#vulnerable code
@Test
public void testGetRecents() {
AppMsgInfo info = handler.decode();
Assert.assertEquals("南京abc.xlsx", info.title);
System.out.println(info);
handler = new AppMsgXmlHandler(
File2String.read("appmsg-publisher.xml"));
info = handler.decode();
Assert.assertEquals("谷歌开发者", info.appName);
System.out.println(info);
}
#location 8
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void parseContent() {
if (MsgType == WechatMessage.MSGTYPE_EMOTICON) {
text = new EmojiMsgXmlHandler(this).getHtml(getMediaLink());
}
else if (MsgType == WechatMessage.MSGTYPE_IMAGE) {
text = new ImageMsgXmlHandler(this).getHtml(getMediaLink(), this);
}
else if (MsgType == WechatMessage.MSGTYPE_APP) {
text = new AppMsgXmlHandler(this).getHtml(getMediaLink());
}
// else if (MsgType == WechatMessage.MSGTYPE_FILE) {
// text = new FileMsgXmlHandler(temp).getHtml(getMediaLink());
// }
else if (MsgType == WechatMessage.MSGTYPE_VIDEO) {
text = "视频消息(请在手机上查看)";
}
else if (MsgType == WechatMessage.MSGTYPE_VOICE) {
text = "语音消息(请在手机上查看)";
}
else if (MsgType == WechatMessage.MSGTYPE_MICROVIDEO) {
text = "小视频(请在手机上查看)";
}
else if (MsgType == WechatMessage.MSGTYPE_VERIFYMSG) {
text = "验证消息(请在手机上查看)";
}
else if (MsgType == WechatMessage.MSGTYPE_VOIPINVITE) {
text = "视频邀请消息(请在手机上查看)";
}
else {
String temp = StringUtils.decodeXml(Content);
text = temp;
}
}
|
#vulnerable code
public void parseContent() {
String temp = StringUtils.decodeXml(Content);
if (MsgType == WechatMessage.MSGTYPE_EMOTICON) {
text = new EmojiMsgXmlHandler(temp).getHtml(getMediaLink(), this);
}
else if (MsgType == WechatMessage.MSGTYPE_IMAGE) {
text = new ImageMsgXmlHandler(temp).getHtml(getMediaLink(), this);
}
else if (MsgType == WechatMessage.MSGTYPE_APP) {
text = new AppMsgXmlHandler(temp).getHtml(getMediaLink(), this);
}
// else if (MsgType == WechatMessage.MSGTYPE_FILE) {
// text = new FileMsgXmlHandler(temp).getHtml(getMediaLink());
// }
else if (MsgType == WechatMessage.MSGTYPE_VIDEO) {
text = "视频消息(请在手机上查看)";
}
else if (MsgType == WechatMessage.MSGTYPE_VOICE) {
text = "语音消息(请在手机上查看)";
}
else if (MsgType == WechatMessage.MSGTYPE_MICROVIDEO) {
text = "小视频(请在手机上查看)";
}
else if (MsgType == WechatMessage.MSGTYPE_VERIFYMSG) {
text = "验证消息(请在手机上查看)";
}
else if (MsgType == WechatMessage.MSGTYPE_VOIPINVITE) {
text = "视频邀请消息(请在手机上查看)";
}
else {
text = temp;
}
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public Map<String, GssFunction> get() {
return GssFunctions.getFunctionMap();
}
|
#vulnerable code
public Map<String, GssFunction> get() {
return new ImmutableMap.Builder<String, GssFunction>()
// Arithmetic functions.
.put("add", new GssFunctions.AddToNumericValue())
.put("sub", new GssFunctions.SubtractFromNumericValue())
.put("mult", new GssFunctions.Mult())
// Not named "div" so it will not be confused with the HTML element.
.put("divide", new GssFunctions.Div())
.put("min", new GssFunctions.MinValue())
.put("max", new GssFunctions.MaxValue())
// Color functions.
.put("blendColors", new BlendColors())
.put("blendColorsRgb", new BlendColorsRGB())
.put("makeMutedColor", new MakeMutedColor())
.put("addHsbToCssColor", new AddHsbToCssColor())
.put("makeContrastingColor", new MakeContrastingColor())
.put("addToNumericValue", new AddToNumericValue())
.put("subtractFromNumericValue", new SubtractFromNumericValue())
.put("adjustBrightness", new AdjustBrightness())
// Logic functions.
.put("selectFrom", new SelectFrom())
.build();
}
#location 1
#vulnerability type CHECKERS_IMMUTABLE_CAST
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public ZooKeeper getClient(){
if (zooKeeper==null) {
try {
if (INSTANCE_INIT_LOCK.tryLock(2, TimeUnit.SECONDS)) {
if (zooKeeper==null) { // 二次校验,防止并发创建client
// init new-client
ZooKeeper newZk = null;
try {
newZk = new ZooKeeper(zkaddress, 10000, watcher);
if (zkdigest!=null && zkdigest.trim().length()>0) {
newZk.addAuthInfo("digest",zkdigest.getBytes()); // like "account:password"
}
newZk.exists(zkpath, false); // sync wait until succcess conn
// set success new-client
zooKeeper = newZk;
logger.info(">>>>>>>>>> xxl-rpc, XxlZkClient init success.");
} catch (Exception e) {
// close fail new-client
if (newZk != null) {
newZk.close();
}
logger.error(e.getMessage(), e);
} finally {
INSTANCE_INIT_LOCK.unlock();
}
}
}
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
if (zooKeeper == null) {
throw new XxlRpcException("XxlZkClient.zooKeeper is null.");
}
return zooKeeper;
}
|
#vulnerable code
public ZooKeeper getClient(){
if (zooKeeper==null) {
try {
if (INSTANCE_INIT_LOCK.tryLock(2, TimeUnit.SECONDS)) {
if (zooKeeper==null) { // 二次校验,防止并发创建client
try {
                        zooKeeper = new ZooKeeper(zkaddress, 10000, watcher); // TODO: use a local variable and assign only on success
if (zkdigest!=null && zkdigest.trim().length()>0) {
zooKeeper.addAuthInfo("digest",zkdigest.getBytes()); // like "account:password"
}
zooKeeper.exists(zkpath, false); // sync
} catch (Exception e) {
logger.error(e.getMessage(), e);
} finally {
INSTANCE_INIT_LOCK.unlock();
}
logger.info(">>>>>>>>>> xxl-rpc, XxlZkClient init success.");
}
}
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
}
if (zooKeeper == null) {
throw new XxlRpcException("XxlZkClient.zooKeeper is null.");
}
return zooKeeper;
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
|
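The getClient fix above follows a general pattern: build the client into a local variable, publish it to the shared field only once it is fully initialized, and close it if initialization fails. The same shape reduced to a generic sketch, with ExpensiveClient as a hypothetical type whose connect() signals failure with an unchecked exception:

private volatile ExpensiveClient client;                   // published only when fully ready
private final ReentrantLock initLock = new ReentrantLock();

ExpensiveClient getClient() throws InterruptedException {
    if (client == null && initLock.tryLock(2, TimeUnit.SECONDS)) {
        try {
            if (client == null) {                          // re-check under the lock
                ExpensiveClient candidate = new ExpensiveClient();
                try {
                    candidate.connect();                   // may throw; the shared field stays null
                    client = candidate;                    // publish the fully initialized instance
                } catch (RuntimeException e) {
                    candidate.close();                     // never leak a half-initialized client
                    throw e;
                }
            }
        } finally {
            initLock.unlock();
        }
    }
    return client;
}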
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private static String getAddress() {
if (LOCAL_ADDRESS != null) {
return LOCAL_ADDRESS;
}
InetAddress localAddress = getFirstValidAddress();
LOCAL_ADDRESS = localAddress != null ? localAddress.getHostAddress() : null;
return LOCAL_ADDRESS;
}
|
#vulnerable code
private static String getAddress() {
if (LOCAL_ADDRESS != null) {
return LOCAL_ADDRESS;
}
InetAddress localAddress = getFirstValidAddress();
LOCAL_ADDRESS = localAddress.getHostAddress();
return LOCAL_ADDRESS;
}
#location 6
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void resetCorrectOffsets() {
consumer.pause(consumer.assignment());
Map<String, List<PartitionInfo>> topicInfos = consumer.listTopics();
Set<String> topics = topicInfos.keySet();
List<String> expectTopics = new ArrayList<>(topicHandlers.keySet());
List<PartitionInfo> patitions = null;
for (String topic : topics) {
if(!expectTopics.contains(topic))continue;
patitions = topicInfos.get(topic);
for (PartitionInfo partition : patitions) {
try {
                    //expected offset
long expectOffsets = consumerContext.getLatestProcessedOffsets(topic, partition.partition());
//
TopicPartition topicPartition = new TopicPartition(topic, partition.partition());
OffsetAndMetadata metadata = consumer.committed(new TopicPartition(partition.topic(), partition.partition()));
if(expectOffsets >= 0){
if(expectOffsets < metadata.offset()){
consumer.seek(topicPartition, expectOffsets);
logger.info("seek Topic[{}] partition[{}] from {} to {}",topic, partition.partition(),metadata.offset(),expectOffsets);
}
}
} catch (Exception e) {
logger.warn("try seek topic["+topic+"] partition["+partition.partition()+"] offsets error");
}
}
}
consumer.resume(consumer.assignment());
}
|
#vulnerable code
private void resetCorrectOffsets() {
KafkaConsumerCommand consumerCommand = new KafkaConsumerCommand(consumerContext.getProperties().getProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG));
try {
List<TopicInfo> topicInfos = consumerCommand.consumerGroup(consumerContext.getGroupId()).getTopics();
for (TopicInfo topic : topicInfos) {
List<TopicPartitionInfo> partitions = topic.getPartitions();
for (TopicPartitionInfo partition : partitions) {
try {
                    //expected offset
long expectOffsets = consumerContext.getLatestProcessedOffsets(topic.getTopicName(), partition.getPartition());
//
if(expectOffsets < partition.getOffset()){
consumer.seek(new TopicPartition(topic.getTopicName(), partition.getPartition()), expectOffsets);
logger.info("seek Topic[{}] partition[{}] from {} to {}",topic.getTopicName(), partition.getPartition(),partition.getOffset(),expectOffsets);
}
} catch (Exception e) {
logger.warn("try seek topic["+topic.getTopicName()+"] partition["+partition.getPartition()+"] offsets error",e);
}
}
}
} catch (Exception e) {
logger.warn("KafkaConsumerCommand.consumerGroup("+consumerContext.getGroupId()+") error",e);
}
consumerCommand.close();
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void start() {
createKafkaConsumer();
        //create one ConsumerWorker thread per topic
for (int i = 0; i < topicHandlers.size(); i++) {
ConsumerWorker consumer = new ConsumerWorker();
consumerWorks.add(consumer);
fetcheExecutor.submit(consumer);
}
}
|
#vulnerable code
@Override
public void start() {
for (int i = 0; i < topicHandlers.size(); i++) {
ConsumerWorker<String, DefaultMessage> consumer = new ConsumerWorker<>(configs, topicHandlers,processExecutor);
consumers.add(consumer);
fetcheExecutor.submit(consumer);
}
}
#location 7
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public Object onInterceptor(Invocation invocation) throws Throwable {
final Executor executor = (Executor) invocation.getTarget();
final Object[] args = invocation.getArgs();
final MappedStatement orignMappedStatement = (MappedStatement)args[0];
if(!orignMappedStatement.getSqlCommandType().equals(SqlCommandType.SELECT))return null;
if(!pageMappedStatements.keySet().contains(orignMappedStatement.getId()))return null;
final RowBounds rowBounds = (RowBounds) args[2];
final ResultHandler resultHandler = (ResultHandler) args[3];
final Object parameter = args[1];
PageParams pageParams = PageExecutor.getPageParams();
if(pageParams == null && pageMappedStatements.get(orignMappedStatement.getId())){
if(parameter instanceof Map){
Collection parameterValues = ((Map)parameter).values();
for (Object val : parameterValues) {
if(val instanceof PageParams){
pageParams = (PageParams) val;
break;
}
}
}else{
pageParams = (PageParams) parameter;
}
}
if(pageParams == null)return null;
BoundSql boundSql = orignMappedStatement.getBoundSql(parameter);
        //query the total count
MappedStatement countMappedStatement = getCountMappedStatement(orignMappedStatement);
Long total = executeQueryCount(executor, countMappedStatement, parameter, boundSql, rowBounds, resultHandler);
        //query by page
MappedStatement limitMappedStatement = getLimitMappedStatementIfNotCreate(orignMappedStatement);
boundSql = limitMappedStatement.getBoundSql(parameter);
boundSql.setAdditionalParameter(PARAMETER_OFFSET, pageParams.getOffset());
boundSql.setAdditionalParameter(PARAMETER_SIZE, pageParams.getPageSize());
List<?> datas = executor.query(limitMappedStatement, parameter, RowBounds.DEFAULT, resultHandler,null,boundSql);
Page<Object> page = new Page<Object>(pageParams,total,(List<Object>) datas);
List<Page<?>> list = new ArrayList<Page<?>>(1);
list.add(page);
return list;
}
|
#vulnerable code
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public Object onInterceptor(Invocation invocation) throws Throwable {
final Executor executor = (Executor) invocation.getTarget();
final Object[] args = invocation.getArgs();
final RowBounds rowBounds = (RowBounds) args[2];
final ResultHandler resultHandler = (ResultHandler) args[3];
final MappedStatement orignMappedStatement = (MappedStatement)args[0];
final Object parameter = args[1];
if(!orignMappedStatement.getSqlCommandType().equals(SqlCommandType.SELECT))return null;
if(!pageMappedStatements.keySet().contains(orignMappedStatement.getId()))return null;
PageParams pageParams = PageExecutor.getPageParams();
if(pageParams == null && pageMappedStatements.get(orignMappedStatement.getId())){
if(parameter instanceof Map){
Collection parameterValues = ((Map)parameter).values();
for (Object val : parameterValues) {
if(val instanceof PageParams){
pageParams = (PageParams) val;
break;
}
}
}else{
pageParams = (PageParams) parameter;
}
}
if(pageParams == null)return null;
BoundSql boundSql = orignMappedStatement.getBoundSql(parameter);
        //query the total count
MappedStatement countMappedStatement = getCountMappedStatement(orignMappedStatement);
Long total = executeQueryCount(executor, countMappedStatement, parameter, boundSql, rowBounds, resultHandler);
        //query by page
MappedStatement limitMappedStatement = getLimitMappedStatementIfNotCreate(orignMappedStatement);
boundSql = limitMappedStatement.getBoundSql(parameter);
boundSql.setAdditionalParameter(PARAMETER_OFFSET, pageParams.getOffset());
boundSql.setAdditionalParameter(PARAMETER_SIZE, pageParams.getPageSize());
List<?> datas = executor.query(limitMappedStatement, parameter, RowBounds.DEFAULT, resultHandler,null,boundSql);
Page<Object> page = new Page<Object>(pageParams,total,(List<Object>) datas);
List<Page<?>> list = new ArrayList<Page<?>>(1);
list.add(page);
return list;
}
#location 42
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void setRuning(String jobName, Date fireTime) {
updatingStatus = false;
try {
JobConfig config = getConf(jobName,false);
config.setRunning(true);
config.setLastFireTime(fireTime);
config.setModifyTime(Calendar.getInstance().getTimeInMillis());
config.setErrorMsg(null);
            //update the local cache
schedulerConfgs.put(jobName, config);
try {
if(zkAvailabled)zkClient.writeData(getPath(config), JsonUtils.toJson(config));
} catch (Exception e) {
checkZkAvailabled();
logger.warn(String.format("Job[{}] setRuning error...", jobName),e);
}
} finally {
updatingStatus = false;
}
}
|
#vulnerable code
@Override
public void setRuning(String jobName, Date fireTime) {
updatingStatus = false;
try {
JobConfig config = getConf(jobName,false);
config.setRunning(true);
config.setLastFireTime(fireTime);
config.setCurrentNodeId(JobContext.getContext().getNodeId());
config.setModifyTime(Calendar.getInstance().getTimeInMillis());
config.setErrorMsg(null);
            //update the local cache
schedulerConfgs.put(jobName, config);
try {
if(zkAvailabled)zkClient.writeData(getPath(config), JsonUtils.toJson(config));
} catch (Exception e) {
checkZkAvailabled();
logger.warn(String.format("Job[{}] setRuning error...", jobName),e);
}
} finally {
updatingStatus = false;
}
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected XInputStream open(String path) throws Exception {
//
final XInputStream is = new XInputStreamImpl(new FileInputStream(path));
try {
// Check binlog magic
final byte[] magic = is.readBytes(MySQLConstants.BINLOG_MAGIC.length);
if(!CodecUtils.equals(magic, MySQLConstants.BINLOG_MAGIC)) {
throw new NestableRuntimeException("invalid binlog magic, file: " + path);
}
//
if(this.startPosition > MySQLConstants.BINLOG_MAGIC.length) {
is.skip(this.startPosition - MySQLConstants.BINLOG_MAGIC.length);
}
return is;
} catch(Exception e) {
IOUtils.closeQuietly(is);
throw e;
}
}
|
#vulnerable code
protected XInputStream open(String path) throws Exception {
//
final RandomAccessFile file = new RandomAccessFile(path, "r");
final XInputStream is = new XInputStreamImpl(new RamdomAccessFileInputStream(file));
try {
// Check binlog magic
final byte[] magic = is.readBytes(MySQLConstants.BINLOG_MAGIC.length);
if(!CodecUtils.equals(magic, MySQLConstants.BINLOG_MAGIC)) {
throw new NestableRuntimeException("invalid binlog magic, file: " + path);
}
//
if(this.startPosition > MySQLConstants.BINLOG_MAGIC.length) {
is.skip(this.startPosition - MySQLConstants.BINLOG_MAGIC.length);
}
return is;
} catch(Exception e) {
IOUtils.closeQuietly(is);
throw e;
}
}
#location 18
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main(String[] args) throws IOException {
String file = "src/test/resources/jansi.ans";
if( args.length>0 )
file = args[0];
// Allows us to disable ANSI processing.
if( "true".equals(System.getProperty("jansi", "true")) ) {
AnsiConsole.systemInstall();
}
PrintStream out = System.out;
FileInputStream f = new FileInputStream(file);
int c;
while( (c=f.read())>=0 ) {
out.write(c);
}
f.close();
}
|
#vulnerable code
public static void main(String[] args) throws IOException {
AnsiConsole.systemInstall();
PrintStream out = System.out;
FileInputStream f = new FileInputStream("src/test/resources/jansi.ans");
int c;
while( (c=f.read())>=0 ) {
out.write(c);
}
f.close();
}
#location 8
#vulnerability type RESOURCE_LEAK
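The same dump loop written with try-with-resources, a sketch assuming only java.io (the jansi install call is left out); the stream is closed even if read() throws.

import java.io.FileInputStream;
import java.io.IOException;

public class DumpFile {
    public static void main(String[] args) throws IOException {
        String file = args.length > 0 ? args[0] : "src/test/resources/jansi.ans"; // same default path as above
        try (FileInputStream in = new FileInputStream(file)) { // closed on any exit path
            int c;
            while ((c = in.read()) >= 0) {
                System.out.write(c);
            }
            System.out.flush();
        }
    }
}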
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
Client(String user_id, String license_key) {
this.user_id = user_id;
this.license_key = license_key;
}
|
#vulnerable code
Country Country(String ip_address) {
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
HttpGet httpget = new HttpGet("https://geoip.maxmind.com/geoip/country/" + ip_address);
httpget.addHeader("Accept","application/json");
httpget.addHeader(BasicScheme.authenticate(
new UsernamePasswordCredentials(user_id,license_key),"UTF-8",false));
HttpResponse response = httpclient.execute(httpget);
HttpEntity entity = response.getEntity();
if (entity != null) {
InputStream instream = entity.getContent();
BufferedReader reader = new BufferedReader(
new InputStreamReader(instream));
return new Country(reader.readLine());
}
} catch (IOException e) {
e.printStackTrace();
} finally {
httpclient.getConnectionManager().shutdown();
}
return null;
}
#location 19
#vulnerability type RESOURCE_LEAK
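A hedged sketch of an equivalent GET using the JDK's java.net.http client (Java 11+): BodyHandlers.ofString() consumes and releases the response, so no reader or entity stream is left open. The host name and credential handling are placeholders, not MaxMind's actual endpoint or API.

import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class GeoLookupSketch {
    // Returns the raw JSON body; reading it fully as a String means there is
    // no entity stream or reader for the caller to forget to close.
    static String lookup(String userId, String licenseKey, String ip)
            throws IOException, InterruptedException {
        String auth = Base64.getEncoder()
                .encodeToString((userId + ":" + licenseKey).getBytes(StandardCharsets.UTF_8));
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("https://geoip.example.com/country/" + ip)) // placeholder host
                .header("Accept", "application/json")
                .header("Authorization", "Basic " + auth)
                .GET()
                .build();
        return HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString())
                .body();
    }
}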
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void main( String[] args )
{
String user_id = args[0];
String license_key = args[1];
String ip_address = args[2];
Client cl = new Client(user_id,license_key);
Country c = cl.Country(ip_address);
System.out.println(c.get_country_name("en"));
}
|
#vulnerable code
public static void main( String[] args )
{
try {
String user_id = args[0];
String license_key = args[1];
String ip_address = args[2];
Client cl = new Client(user_id,license_key);
JSONObject o = cl.Country(ip_address);
o = o.getJSONObject("country");
o = o.getJSONObject("name");
String name = o.getString("en");
System.out.println(name);
} catch (JSONException e) {
e.printStackTrace();
}
}
#location 9
#vulnerability type NULL_DEREFERENCE
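A self-contained sketch of the null-guard idiom the refactor sidesteps: wrapping a possibly-null lookup result in Optional instead of chaining getters on it. The lookupNames method and its data are invented for illustration.

import java.util.Map;
import java.util.Optional;

public class NullGuardSketch {
    // A lookup that may legitimately return null (e.g. a network failure swallowed upstream).
    static Map<String, String> lookupNames(String ip) {
        return null; // stand-in for the failing path
    }

    public static void main(String[] args) {
        String ip = args.length > 0 ? args[0] : "128.101.101.101";
        // Wrapping the nullable result makes the "no data" branch explicit
        // instead of chaining getters straight into a NullPointerException.
        String name = Optional.ofNullable(lookupNames(ip))
                .map(names -> names.get("en"))
                .orElse("unknown");
        System.out.println(name);
    }
}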
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
Client(String user_id, String license_key) {
this.user_id = user_id;
this.license_key = license_key;
}
|
#vulnerable code
Country Country(String ip_address) {
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
HttpGet httpget = new HttpGet("https://geoip.maxmind.com/geoip/country/" + ip_address);
httpget.addHeader("Accept","application/json");
httpget.addHeader(BasicScheme.authenticate(
new UsernamePasswordCredentials(user_id,license_key),"UTF-8",false));
HttpResponse response = httpclient.execute(httpget);
HttpEntity entity = response.getEntity();
if (entity != null) {
InputStream instream = entity.getContent();
BufferedReader reader = new BufferedReader(
new InputStreamReader(instream));
return new Country(reader.readLine());
}
} catch (IOException e) {
e.printStackTrace();
} finally {
httpclient.getConnectionManager().shutdown();
}
return null;
}
#location 14
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public <T> Collection<T> loadAll(Class<T> type, Filters filters, SortOrder sortOrder, Pagination pagination, int depth) {
Transaction tx = session.ensureTransaction();
String entityType = session.entityType(type.getName());
QueryStatements queryStatements = session.queryStatementsFor(type);
// all this business about selecting which type of model/response to handle is horribly hacky
// it should be possible for the response handler to select based on the model implementation
// and we should have a single method loadAll(...). Filters should not be a special case
// though they are at the moment because of the problems with "graph" response format.
if (filters.isEmpty()) {
Query qry = queryStatements.findByType(entityType, depth)
.setSortOrder(sortOrder)
.setPagination(pagination);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, tx)) {
return session.responseHandler().loadAll(type, response);
}
} else {
filters = resolvePropertyAnnotations(type, filters);
Query qry = queryStatements.findByProperties(entityType, filters, depth)
.setSortOrder(sortOrder)
.setPagination(pagination);
if (depth != 0) {
try (Neo4jResponse<GraphRowModel> response = session.requestHandler().execute((GraphRowModelQuery) qry, tx)) {
return session.responseHandler().loadByProperty(type, response);
}
} else {
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, tx)) {
return session.responseHandler().loadAll(type, response);
}
}
}
}
|
#vulnerable code
@Override
public <T> Collection<T> loadAll(Class<T> type, Filters filters, SortOrder sortOrder, Pagination pagination, int depth) {
String url = session.ensureTransaction().url();
String entityType = session.entityType(type.getName());
QueryStatements queryStatements = session.queryStatementsFor(type);
// all this business about selecting which type of model/response to handle is horribly hacky
// it should be possible for the response handler to select based on the model implementation
// and we should have a single method loadAll(...). Filters should not be a special case
// though they are at the moment because of the problems with "graph" response format.
if (filters.isEmpty()) {
Query qry = queryStatements.findByType(entityType, depth)
.setSortOrder(sortOrder)
.setPagination(pagination);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, url)) {
return session.responseHandler().loadAll(type, response);
}
} else {
filters = resolvePropertyAnnotations(type, filters);
Query qry = queryStatements.findByProperties(entityType, filters, depth)
.setSortOrder(sortOrder)
.setPagination(pagination);
if (depth != 0) {
try (Neo4jResponse<GraphRowModel> response = session.requestHandler().execute((GraphRowModelQuery) qry, url)) {
return session.responseHandler().loadByProperty(type, response);
}
} else {
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, url)) {
return session.responseHandler().loadAll(type, response);
}
}
}
}
#location 4
#vulnerability type RESOURCE_LEAK
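A generic sketch of the shape of this patch, with hypothetical Tx and Response types rather than the OGM's own API: hold on to the transaction handle you opened, and scope the response in try-with-resources so it is closed on every path.

// Hypothetical types only; they mirror the call shape, not the OGM API.
interface Response<T> extends AutoCloseable {
    T next();
    @Override
    void close();                                   // narrowed: no checked exception
}

final class Tx {
    // the session that handed this out owns commit/rollback; callers just pass it along
}

class LoadAllSketch {
    Tx ensureTransaction() { return new Tx(); }

    <T> Response<T> execute(String query, Tx tx) {
        throw new UnsupportedOperationException("stub for the real request handler");
    }

    <T> T loadFirst(String query) {
        Tx tx = ensureTransaction();                // keep the handle, not just a URL string
        try (Response<T> response = execute(query, tx)) {
            return response.next();                 // response is closed on every path
        }
    }
}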
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private boolean bothWayMappingRequired(Object srcObject, String relationshipType, Object tgtObject, String relationshipDirection) {
boolean mapBothWays = false;
ClassInfo tgtInfo = metaData.classInfo(tgtObject);
if(tgtInfo == null) {
LOGGER.warn("Unable to process {} on {}. Checck the mapping.", relationshipType, srcObject.getClass());
// #347. attribute is not a rel ? maybe would be better to change FieldInfo.persistableAsProperty ?
return false;
}
for (FieldInfo tgtRelReader : tgtInfo.relationshipFields()) {
String tgtRelationshipDirection = tgtRelReader.relationshipDirection();
if ((tgtRelationshipDirection.equals(Relationship.OUTGOING) || tgtRelationshipDirection.equals(Relationship.INCOMING)) //The relationship direction must be explicitly incoming or outgoing
&& tgtRelReader.relationshipType().equals(relationshipType)) { //The source must have the same relationship type to the target as the target to the source
//Moreover, the source must be related to the target and vice versa in the SAME direction
if (relationshipDirection.equals(tgtRelationshipDirection)) {
Object target = tgtRelReader.read(tgtObject);
if (target != null) {
if (target instanceof Iterable) {
for (Object relatedObject : (Iterable<?>) target) {
if (relatedObject.equals(srcObject)) { //the target is mapped to the source as well
mapBothWays = true;
}
}
} else if (target.getClass().isArray()) {
for (Object relatedObject : (Object[]) target) {
if (relatedObject.equals(srcObject)) { //the target is mapped to the source as well
mapBothWays = true;
}
}
} else {
if (target.equals(srcObject)) { //the target is mapped to the source as well
mapBothWays = true;
}
}
}
}
}
}
return mapBothWays;
}
|
#vulnerable code
private boolean bothWayMappingRequired(Object srcObject, String relationshipType, Object tgtObject, String relationshipDirection) {
boolean mapBothWays = false;
ClassInfo tgtInfo = metaData.classInfo(tgtObject);
for (FieldInfo tgtRelReader : tgtInfo.relationshipFields()) {
String tgtRelationshipDirection = tgtRelReader.relationshipDirection();
if ((tgtRelationshipDirection.equals(Relationship.OUTGOING) || tgtRelationshipDirection.equals(Relationship.INCOMING)) //The relationship direction must be explicitly incoming or outgoing
&& tgtRelReader.relationshipType().equals(relationshipType)) { //The source must have the same relationship type to the target as the target to the source
//Moreover, the source must be related to the target and vice versa in the SAME direction
if (relationshipDirection.equals(tgtRelationshipDirection)) {
Object target = tgtRelReader.read(tgtObject);
if (target != null) {
if (target instanceof Iterable) {
for (Object relatedObject : (Iterable<?>) target) {
if (relatedObject.equals(srcObject)) { //the target is mapped to the source as well
mapBothWays = true;
}
}
} else if (target.getClass().isArray()) {
for (Object relatedObject : (Object[]) target) {
if (relatedObject.equals(srcObject)) { //the target is mapped to the source as well
mapBothWays = true;
}
}
} else {
if (target.equals(srcObject)) { //the target is mapped to the source as well
mapBothWays = true;
}
}
}
}
}
}
return mapBothWays;
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public Result query(String cypher, Map<String, ?> parameters, boolean readOnly) {
validateQuery(cypher, parameters, readOnly);
//If readOnly=true, just execute the query. If false, execute the query and return stats as well
if(readOnly) {
return new QueryResult(executeAndMap(null, cypher, parameters, new MapRowModelMapper()),null);
}
else {
Transaction tx = session.ensureTransaction();
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(cypher, parameters);
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, tx)) {
RowQueryStatisticsResult result = response.next();
RowModelMapper rowModelMapper = new MapRowModelMapper();
Collection rowResult = new LinkedHashSet();
for (Iterator<Object> iterator = result.getRows().iterator(); iterator.hasNext(); ) {
List next = (List) iterator.next();
rowModelMapper.mapIntoResult(rowResult, next.toArray(), response.columns());
}
return new QueryResult(rowResult, result.getStats());
}
}
}
|
#vulnerable code
@Override
public Result query(String cypher, Map<String, ?> parameters, boolean readOnly) {
validateQuery(cypher, parameters, readOnly);
//If readOnly=true, just execute the query. If false, execute the query and return stats as well
if(readOnly) {
return new QueryResult(executeAndMap(null, cypher, parameters, new MapRowModelMapper()),null);
}
else {
String url = session.ensureTransaction().url();
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(cypher, parameters);
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, url)) {
RowQueryStatisticsResult result = response.next();
RowModelMapper rowModelMapper = new MapRowModelMapper();
Collection rowResult = new LinkedHashSet();
for (Iterator<Object> iterator = result.getRows().iterator(); iterator.hasNext(); ) {
List next = (List) iterator.next();
rowModelMapper.mapIntoResult(rowResult, next.toArray(), response.columns());
}
return new QueryResult(rowResult, result.getStats());
}
}
}
#location 10
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public <T> Collection<T> loadAll(Collection<T> objects, SortOrder sortOrder, Pagination pagination, int depth) {
if (objects == null || objects.isEmpty()) {
return objects;
}
ClassInfo commonClassInfo = findCommonClassInfo(objects);
Set<Serializable> ids = new LinkedHashSet<>();
for (Object o : objects) {
FieldInfo idField;
if (commonClassInfo.hasPrimaryIndexField()) {
idField = commonClassInfo.primaryIndexField();
} else {
idField = commonClassInfo.identityField();
}
ids.add((Serializable) idField.readProperty(o));
}
return session.loadAll((Class<T>) commonClassInfo.getUnderlyingClass(), ids, sortOrder, pagination, depth);
}
|
#vulnerable code
public <T> Collection<T> loadAll(Collection<T> objects, SortOrder sortOrder, Pagination pagination, int depth) {
if (objects == null || objects.isEmpty()) {
return objects;
}
Set<Serializable> ids = new LinkedHashSet<>();
Class type = objects.iterator().next().getClass();
ClassInfo classInfo = session.metaData().classInfo(type.getName());
for (Object o : objects) {
FieldInfo idField;
if (classInfo.hasPrimaryIndexField()) {
idField = classInfo.primaryIndexField();
} else {
idField = classInfo.identityField();
}
ids.add((Serializable) idField.readProperty(o));
}
return session.loadAll(type, ids, sortOrder, pagination, depth);
}
#location 12
#vulnerability type NULL_DEREFERENCE
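A small sketch of computing a common class for a mixed collection, the idea behind findCommonClassInfo here, using plain reflection; the method name and the Number example are illustrative.

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class CommonTypeSketch {
    // Walks up from the first element's class until every element is assignable
    // to the candidate; a heterogeneous collection then resolves to a shared
    // supertype instead of whatever class happens to come first.
    static Class<?> commonClass(Collection<?> objects) {
        Class<?> candidate = objects.iterator().next().getClass();
        while (candidate != Object.class) {
            Class<?> c = candidate;
            if (objects.stream().allMatch(c::isInstance)) {
                return candidate;
            }
            candidate = candidate.getSuperclass();
        }
        return Object.class;
    }

    public static void main(String[] args) {
        List<Number> mixed = Arrays.asList(1, 2L, 3.0);
        System.out.println(commonClass(mixed)); // class java.lang.Number
    }
}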
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void scan(List<String> classPaths, ClassFileProcessor processor) {
this.classPaths = classPaths;
this.processor = processor;
Set<File> classPathElements = getUniqueClasspathElements(classPaths);
LOGGER.debug("Classpath elements:");
for (File classPathElement : classPathElements) {
LOGGER.debug(classPathElement.getPath());
}
try {
for (File classPathElement : classPathElements) {
String path = classPathElement.getPath();
if (classPathElement.isDirectory()) {
scanFolder(classPathElement, path.length() + 1);
} else if (classPathElement.isFile()) {
String pathLower = path.toLowerCase();
if (pathLower.endsWith(".jar") || pathLower.endsWith(".zip")) {
scanZipFile(new ZipFile(classPathElement));
} else {
scanFile(classPathElement, classPathElement.getName());
}
}
}
processor.finish();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
|
#vulnerable code
public void scan(List<String> classPaths, ClassFileProcessor processor) {
this.classPaths = classPaths;
this.processor = processor;
List<File> classPathElements = getUniqueClasspathElements(classPaths);
try {
for (File classPathElement : classPathElements) {
String path = classPathElement.getPath();
if (classPathElement.isDirectory()) {
scanFolder(classPathElement, path.length() + 1);
} else if (classPathElement.isFile()) {
String pathLower = path.toLowerCase();
if (pathLower.endsWith(".jar") || pathLower.endsWith(".zip")) {
scanZipFile(new ZipFile(classPathElement));
} else {
scanFile(classPathElement, classPathElement.getName());
}
}
}
processor.finish();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
#location 15
#vulnerability type RESOURCE_LEAK
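A short sketch of the try-with-resources form for the archive case, since ZipFile is AutoCloseable; the .class filter is just an example predicate.

import java.io.File;
import java.io.IOException;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class ZipScanSketch {
    // try-with-resources releases the archive's file handle even when an
    // entry throws while being processed.
    static void scanZip(File archive) throws IOException {
        try (ZipFile zip = new ZipFile(archive)) {
            Enumeration<? extends ZipEntry> entries = zip.entries();
            while (entries.hasMoreElements()) {
                ZipEntry entry = entries.nextElement();
                if (entry.getName().endsWith(".class")) {
                    System.out.println(entry.getName());
                }
            }
        }
    }
}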
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void configure(Configuration configuration) {
destroy();
Components.configuration = configuration;
}
|
#vulnerable code
public static void configure(Configuration configuration) {
driver = null;
Components.configuration = configuration;
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
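A hypothetical holder mirroring the shape of this patch: reconfiguration tears down the old driver under the same lock that guards lazy construction, instead of nulling the shared field directly.

// Hypothetical singleton holder, not the project's Components class.
public class ComponentsSketch {
    private static Object driver;          // shared singleton state
    private static Object configuration;

    public static synchronized void configure(Object newConfiguration) {
        destroy();                          // release the old driver safely
        configuration = newConfiguration;
    }

    public static synchronized void destroy() {
        if (driver != null) {
            // close the old driver here before dropping the reference
            driver = null;
        }
    }

    public static synchronized Object driver() {
        if (driver == null) {
            driver = new Object();          // stand-in for lazy driver construction
        }
        return driver;
    }
}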
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void shouldParseDataInRowResponseCorrectly() {
try (Response<DefaultRestModel> rsp = new TestRestHttpResponse((rowResultsAndNoErrors()))) {
DefaultRestModel restModel = rsp.next();
assertNotNull(restModel);
Map<String,Object> rows = restModel.getRow();
assertEquals(3,rows.entrySet().size());
assertEquals(1, rows.get("count"));
NodeModel data = (NodeModel) rows.get("director");
assertEquals(1931,data.property("born"));
data = (NodeModel) rows.get("movie");
assertEquals("The Birdcage", data.property("title"));
assertEquals(395L, data.getId().longValue());
restModel = rsp.next();
rows = restModel.getRow();
assertEquals(3,rows.entrySet().size());
assertEquals(1, rows.get("count"));
data = (NodeModel) rows.get("director");
assertEquals(1931,data.property("born"));
data = (NodeModel) rows.get("movie");
assertEquals(2007,data.property("released"));
}
}
|
#vulnerable code
@Test
public void shouldParseDataInRowResponseCorrectly() {
try (Response<DefaultRestModel> rsp = new TestRestHttpResponse((rowResultsAndNoErrors()))) {
DefaultRestModel restModel = rsp.next();
assertNotNull(restModel);
Object[] rows = restModel.getValues();
assertEquals(3,rows.length);
assertEquals(1, rows[0]);
Map data = (Map) rows[1];
assertEquals(1931,((Map)data.get("data")).get("born"));
data = (Map) rows[2];
assertEquals("The Birdcage", ((Map)data.get("data")).get("title"));
assertEquals(395, ((Map)data.get("metadata")).get("id"));
restModel = rsp.next();
rows = restModel.getValues();
assertEquals(3,rows.length);
assertEquals(1, rows[0]);
data = (Map) rows[1];
assertEquals(1931,((Map)data.get("data")).get("born"));
data = (Map) rows[2];
assertEquals(2007, ((Map)data.get("data")).get("released"));
}
}
#location 23
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void setProperties(List<Property<String, Object>> propertyList, Object instance) {
ClassInfo classInfo = metadata.classInfo(instance);
getCompositeProperties(propertyList, classInfo).forEach( (field, v) -> field.write(instance, v));
for (Property<?, ?> property : propertyList) {
writeProperty(classInfo, instance, property);
}
}
|
#vulnerable code
private void setProperties(List<Property<String, Object>> propertyList, Object instance) {
ClassInfo classInfo = metadata.classInfo(instance);
Collection<FieldInfo> compositeFields = classInfo.fieldsInfo().compositeFields();
if (compositeFields.size() > 0) {
Map<String, ?> propertyMap = toMap(propertyList);
for (FieldInfo field : compositeFields) {
CompositeAttributeConverter<?> converter = field.getCompositeConverter();
Object value = converter.toEntityAttribute(propertyMap);
FieldInfo writer = classInfo.getFieldInfo(field.getName());
writer.write(instance, value);
}
}
for (Property<?, ?> property : propertyList) {
writeProperty(classInfo, instance, property);
}
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public <T> Collection<T> loadAll(Class<T> type, Collection<Long> ids, SortOrder sortOrder, Pagination pagination, int depth) {
Transaction tx = session.ensureTransaction();
String entityType = session.entityType(type.getName());
QueryStatements queryStatements = session.queryStatementsFor(type);
Query qry = queryStatements.findAllByType(entityType, ids, depth)
.setSortOrder(sortOrder)
.setPagination(pagination);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, tx)) {
return session.responseHandler().loadAll(type, response);
}
}
|
#vulnerable code
@Override
public <T> Collection<T> loadAll(Class<T> type, Collection<Long> ids, SortOrder sortOrder, Pagination pagination, int depth) {
String url = session.ensureTransaction().url();
String entityType = session.entityType(type.getName());
QueryStatements queryStatements = session.queryStatementsFor(type);
Query qry = queryStatements.findAllByType(entityType, ids, depth)
.setSortOrder(sortOrder)
.setPagination(pagination);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, url)) {
return session.responseHandler().loadAll(type, response);
}
}
#location 4
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public FieldInfo propertyField(String propertyName) {
if (propertyFields == null) {
Collection<FieldInfo> fieldInfos = propertyFields();
propertyFields = new HashMap<>(fieldInfos.size());
for (FieldInfo fieldInfo : fieldInfos) {
propertyFields.put(fieldInfo.property(), fieldInfo);
}
}
return propertyFields.get(propertyName);
}
|
#vulnerable code
public FieldInfo propertyField(String propertyName) {
for (FieldInfo fieldInfo : propertyFields()) {
if (fieldInfo.property().equalsIgnoreCase(propertyName)) {
return fieldInfo;
}
}
return null;
}
#location 3
#vulnerability type NULL_DEREFERENCE
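A compact sketch of the lazily built lookup index the patch introduces, with an invented Field record (Java 16+, used for brevity) standing in for FieldInfo.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class PropertyIndexSketch {
    // Hypothetical field descriptor: the Java name may differ from the persisted property key.
    record Field(String name, String property) { }

    private final List<Field> fields = List.of(
            new Field("firstName", "first_name"),
            new Field("age", "age"));
    private Map<String, Field> byProperty;  // built on first lookup

    // Building the index once replaces the repeated linear scan whose
    // per-element property() call was the fragile step in the original loop.
    Field propertyField(String propertyName) {
        if (byProperty == null) {
            byProperty = new HashMap<>(fields.size());
            for (Field f : fields) {
                byProperty.put(f.property(), f);
            }
        }
        return byProperty.get(propertyName);
    }
}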
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public RelationalReader getIterableReader(ClassInfo classInfo, Class<?> parameterType, String relationshipType, String relationshipDirection) {
if(!iterableReaderCache.containsKey(classInfo)) {
iterableReaderCache.put(classInfo, new HashMap<DirectedRelationshipForType, RelationalReader>());
}
DirectedRelationshipForType directedRelationshipForType = new DirectedRelationshipForType(relationshipType,relationshipDirection, parameterType);
if(iterableReaderCache.get(classInfo).containsKey(directedRelationshipForType)) {
return iterableReaderCache.get(classInfo).get(directedRelationshipForType);
}
//1st find a method annotated with type and direction
MethodInfo methodInfo = getIterableGetterMethodInfo(classInfo, parameterType, relationshipType, relationshipDirection, STRICT_MODE);
if (methodInfo != null) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
iterableReaderCache.get(classInfo).put(directedRelationshipForType, methodReader);
return methodReader;
}
//2nd find a field annotated with type and direction
FieldInfo fieldInfo = getIterableFieldInfo(classInfo, parameterType, relationshipType, relationshipDirection, STRICT_MODE);
if (fieldInfo != null) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
iterableReaderCache.get(classInfo).put(directedRelationshipForType, fieldReader);
return fieldReader;
}
//If relationshipDirection=INCOMING, we should have found an annotated field already
if(!relationshipDirection.equals(Relationship.INCOMING)) {
//3rd find a method with implied type and direction
methodInfo = getIterableGetterMethodInfo(classInfo, parameterType, relationshipType, relationshipDirection, INFERRED_MODE);
if (methodInfo != null) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
iterableReaderCache.get(classInfo).put(directedRelationshipForType, methodReader);
return methodReader;
}
//4th find a field with implied type and direction
fieldInfo = getIterableFieldInfo(classInfo, parameterType, relationshipType, relationshipDirection, INFERRED_MODE);
if (fieldInfo != null) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
iterableReaderCache.get(classInfo).put(directedRelationshipForType, fieldReader);
return fieldReader;
}
}
iterableReaderCache.get(classInfo).put(directedRelationshipForType, null);
return null;
}
|
#vulnerable code
@Override
public RelationalReader getIterableReader(ClassInfo classInfo, Class<?> parameterType, String relationshipType, String relationshipDirection) {
if(iterableReaderCache.get(classInfo) == null) {
iterableReaderCache.put(classInfo, new HashMap<DirectedRelationshipForType, RelationalReader>());
}
DirectedRelationshipForType directedRelationshipForType = new DirectedRelationshipForType(relationshipType,relationshipDirection, parameterType);
if(iterableReaderCache.get(classInfo).containsKey(directedRelationshipForType)) {
return iterableReaderCache.get(classInfo).get(directedRelationshipForType);
}
//1st find a method annotated with type and direction
MethodInfo methodInfo = getIterableGetterMethodInfo(classInfo, parameterType, relationshipType, relationshipDirection, STRICT_MODE);
if (methodInfo != null) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
iterableReaderCache.get(classInfo).put(directedRelationshipForType, methodReader);
return methodReader;
}
//2nd find a field annotated with type and direction
FieldInfo fieldInfo = getIterableFieldInfo(classInfo, parameterType, relationshipType, relationshipDirection, STRICT_MODE);
if (fieldInfo != null) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
iterableReaderCache.get(classInfo).put(directedRelationshipForType, fieldReader);
return fieldReader;
}
//If relationshipDirection=INCOMING, we should have found an annotated field already
if(!relationshipDirection.equals(Relationship.INCOMING)) {
//3rd find a method with implied type and direction
methodInfo = getIterableGetterMethodInfo(classInfo, parameterType, relationshipType, relationshipDirection, INFERRED_MODE);
if (methodInfo != null) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
iterableReaderCache.get(classInfo).put(directedRelationshipForType, methodReader);
return methodReader;
}
//4th find a field with implied type and direction
fieldInfo = getIterableFieldInfo(classInfo, parameterType, relationshipType, relationshipDirection, INFERRED_MODE);
if (fieldInfo != null) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
iterableReaderCache.get(classInfo).put(directedRelationshipForType, fieldReader);
return fieldReader;
}
}
iterableReaderCache.get(classInfo).put(directedRelationshipForType, null);
return null;
}
#location 7
#vulnerability type NULL_DEREFERENCE
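A sketch of the same two-level cache expressed with computeIfAbsent and an Optional "nothing found" marker (hypothetical key and value types), which removes the manual containsKey/get bookkeeping that the patch tightens up.

import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

public class NestedCacheSketch {
    // Outer key: class name; inner key: relationship descriptor. A value may be
    // a legitimate "nothing found" marker, so presence is tracked by the map
    // itself rather than by get(...) == null checks.
    private final Map<String, Map<String, Optional<String>>> cache = new ConcurrentHashMap<>();

    String lookup(String className, String relationship) {
        Map<String, Optional<String>> perClass =
                cache.computeIfAbsent(className, k -> new ConcurrentHashMap<>());
        Optional<String> cached = perClass.computeIfAbsent(relationship,
                k -> resolve(className, k));   // computed once, even when empty
        return cached.orElse(null);
    }

    private Optional<String> resolve(String className, String relationship) {
        // stand-in for the reflective scan; empty means "no reader for this pair"
        return Optional.empty();
    }
}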
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public <T> T load(Class<T> type, Long id, int depth) {
Transaction tx = session.ensureTransaction();
QueryStatements queryStatements = session.queryStatementsFor(type);
Query qry = queryStatements.findOne(id,depth);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, tx)) {
return session.responseHandler().loadById(type, response, id);
}
}
|
#vulnerable code
@Override
public <T> T load(Class<T> type, Long id, int depth) {
String url = session.ensureTransaction().url();
QueryStatements queryStatements = session.queryStatementsFor(type);
Query qry = queryStatements.findOne(id,depth);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, url)) {
return session.responseHandler().loadById(type, response, id);
}
}
#location 3
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public Long nativeId(Object entity) {
ClassInfo classInfo = metaData.classInfo(entity);
if (classInfo == null) {
throw new IllegalArgumentException("Class " + entity.getClass() + " is not a valid entity class. "
+ "Please check the entity mapping.");
}
generateIdIfNecessary(entity, classInfo);
if (classInfo.hasIdentityField()) {
return EntityUtils.identity(entity, metaData);
} else {
FieldInfo fieldInfo = classInfo.primaryIndexField();
Object primaryId = fieldInfo.readProperty(entity);
if (primaryId == null) {
throw new MappingException("Field with primary id is null for entity " + entity);
}
LabelPrimaryId key = new LabelPrimaryId(classInfo, primaryId);
Long graphId = primaryIdToNativeId.get(key);
if (graphId == null) {
graphId = EntityUtils.nextRef();
primaryIdToNativeId.put(key, graphId);
}
return graphId;
}
}
|
#vulnerable code
public Long nativeId(Object entity) {
ClassInfo classInfo = metaData.classInfo(entity);
generateIdIfNecessary(entity, classInfo);
if (classInfo.hasIdentityField()) {
return EntityUtils.identity(entity, metaData);
} else {
FieldInfo fieldInfo = classInfo.primaryIndexField();
Object primaryId = fieldInfo.readProperty(entity);
if (primaryId == null) {
throw new MappingException("Field with primary id is null for entity " + entity);
}
LabelPrimaryId key = new LabelPrimaryId(classInfo, primaryId);
Long graphId = primaryIdToNativeId.get(key);
if (graphId == null) {
graphId = EntityUtils.nextRef();
primaryIdToNativeId.put(key, graphId);
}
return graphId;
}
}
#location 3
#vulnerability type NULL_DEREFERENCE
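A minimal sketch of the fail-fast check this patch adds: turning a later, anonymous NullPointerException into an immediate mapping error that names the offending class. The classInfoFor method and the entity are stand-ins.

public class FailFastSketch {
    // Hypothetical metadata lookup that returns null for unmapped classes.
    static Object classInfoFor(Object entity) {
        return null;
    }

    // Failing fast with the class in the message makes the mapping error actionable.
    static Object requireClassInfo(Object entity) {
        Object info = classInfoFor(entity);
        if (info == null) {
            throw new IllegalArgumentException("Class " + entity.getClass()
                    + " is not a valid entity class. Please check the entity mapping.");
        }
        return info;
    }

    public static void main(String[] args) {
        try {
            requireClassInfo("not an entity");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}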
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public long countEntitiesOfType(Class<?> entity) {
ClassInfo classInfo = session.metaData().classInfo(entity.getName());
if (classInfo == null) {
return 0;
}
RowModelQuery countStatement = new AggregateStatements().countNodesLabelledWith(classInfo.labels());
Transaction tx = session.ensureTransaction();
try (Neo4jResponse<RowModel> response = session.requestHandler().execute(countStatement, tx)) {
RowModel queryResult = response.next();
return queryResult == null ? 0 : ((Number) queryResult.getValues()[0]).longValue();
}
}
|
#vulnerable code
@Override
public long countEntitiesOfType(Class<?> entity) {
ClassInfo classInfo = session.metaData().classInfo(entity.getName());
if (classInfo == null) {
return 0;
}
RowModelQuery countStatement = new AggregateStatements().countNodesLabelledWith(classInfo.labels());
String url = session.ensureTransaction().url();
try (Neo4jResponse<RowModel> response = session.requestHandler().execute(countStatement, url)) {
RowModel queryResult = response.next();
return queryResult == null ? 0 : ((Number) queryResult.getValues()[0]).longValue();
}
}
#location 9
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private Object createRelationshipEntity(Edge edge, Object startEntity, Object endEntity) {
ClassInfo relationClassInfo = getRelationshipEntity(edge);
if (relationClassInfo == null) {
throw new MappingException("Could not find a class to map for relation " + edge);
}
Map<String, Object> allProps = new HashMap<>(toMap(edge.getPropertyList()));
getCompositeProperties(edge.getPropertyList(), relationClassInfo).forEach( (k, v) -> {
allProps.put(k.getName(), v);
});
// also add start and end node as valid constructor values
allProps.put(relationClassInfo.getStartNodeReader().getName(), startEntity);
allProps.put(relationClassInfo.getEndNodeReader().getName(), endEntity);
// create and hydrate the new RE
Object relationshipEntity = entityFactory
.newObject(relationClassInfo.getUnderlyingClass(), allProps);
EntityUtils.setIdentity(relationshipEntity, edge.getId(), metadata);
// REs also have properties
setProperties(edge.getPropertyList(), relationshipEntity);
// register it in the mapping context
mappingContext.addRelationshipEntity(relationshipEntity, edge.getId());
// set the start and end entities
ClassInfo relEntityInfo = metadata.classInfo(relationshipEntity);
FieldInfo startNodeWriter = relEntityInfo.getStartNodeReader();
if (startNodeWriter != null) {
startNodeWriter.write(relationshipEntity, startEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the StartNode of relational entity " + relEntityInfo.name());
}
FieldInfo endNodeWriter = relEntityInfo.getEndNodeReader();
if (endNodeWriter != null) {
endNodeWriter.write(relationshipEntity, endEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the EndNode of relational entity " + relEntityInfo.name());
}
return relationshipEntity;
}
|
#vulnerable code
private Object createRelationshipEntity(Edge edge, Object startEntity, Object endEntity) {
// create and hydrate the new RE
Object relationshipEntity = entityFactory.newObject(getRelationshipEntity(edge));
EntityUtils.setIdentity(relationshipEntity, edge.getId(), metadata);
// REs also have properties
setProperties(edge.getPropertyList(), relationshipEntity);
// register it in the mapping context
mappingContext.addRelationshipEntity(relationshipEntity, edge.getId());
// set the start and end entities
ClassInfo relEntityInfo = metadata.classInfo(relationshipEntity);
FieldInfo startNodeWriter = relEntityInfo.getStartNodeReader();
if (startNodeWriter != null) {
startNodeWriter.write(relationshipEntity, startEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the StartNode of relational entity " + relEntityInfo.name());
}
FieldInfo endNodeWriter = relEntityInfo.getEndNodeReader();
if (endNodeWriter != null) {
endNodeWriter.write(relationshipEntity, endEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the EndNode of relational entity " + relEntityInfo.name());
}
return relationshipEntity;
}
#location 16
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void mapOneToMany(Object instance, Class<?> valueType, Object values, String relationshipType, String relationshipDirection) {
ClassInfo classInfo = metadata.classInfo(instance);
RelationalWriter writer = EntityAccessManager.getIterableWriter(classInfo, valueType, relationshipType, relationshipDirection);
if (writer != null) {
if (writer.type().isArray() || Iterable.class.isAssignableFrom(writer.type())) {
RelationalReader reader = EntityAccessManager.getIterableReader(classInfo, valueType, relationshipType, relationshipDirection);
Object currentValues;
if (reader != null) {
currentValues = reader.read(instance);
if (writer.type().isArray()) {
values = EntityAccess.merge(writer.type(), (Iterable<?>) values, (Object[]) currentValues, valueType);
} else {
values = EntityAccess.merge(writer.type(), (Iterable<?>) values, (Collection) currentValues, valueType);
}
}
}
writer.write(instance, values);
return;
}
// this is not necessarily an error. but we can't tell.
logger.debug("Unable to map iterable of type: {} onto property of {}", valueType, classInfo.name());
}
|
#vulnerable code
private void mapOneToMany(Object instance, Class<?> valueType, Object values, String relationshipType, String relationshipDirection) {
ClassInfo classInfo = metadata.classInfo(instance);
RelationalWriter writer = entityAccessStrategy.getIterableWriter(classInfo, valueType, relationshipType, relationshipDirection);
if (writer != null) {
if (writer.type().isArray() || Iterable.class.isAssignableFrom(writer.type())) {
RelationalReader reader = entityAccessStrategy.getIterableReader(classInfo, valueType, relationshipType, relationshipDirection);
Object currentValues;
if (reader != null) {
currentValues = reader.read(instance);
if (writer.type().isArray()) {
values = EntityAccess.merge(writer.type(), (Iterable<?>) values, (Object[]) currentValues, valueType);
} else {
values = EntityAccess.merge(writer.type(), (Iterable<?>) values, (Collection) currentValues, valueType);
}
}
}
writer.write(instance, values);
return;
}
// this is not necessarily an error. but we can't tell.
logger.debug("Unable to map iterable of type: {} onto property of {}", valueType, classInfo.name());
}
#location 23
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void hydrateCourses(Collection<Teacher> teachers) {
session.loadAll(Course.class);
}
|
#vulnerable code
private void hydrateCourses(Collection<Teacher> teachers) {
session.setDriver(new TeacherRequest());
session.setDriver(new CoursesRequest());
session.loadAll(Course.class);
}
#location 4
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public <T> void deleteAll(Class<T> type) {
ClassInfo classInfo = session.metaData().classInfo(type.getName());
if (classInfo != null) {
Transaction tx = session.ensureTransaction();
ParameterisedStatement request = getDeleteStatementsBasedOnType(type).deleteByType(session.entityType(classInfo.name()));
try (Neo4jResponse<String> response = session.requestHandler().execute(request, tx)) {
session.context().clear(type);
}
} else {
session.info(type.getName() + " is not a persistable class");
}
}
|
#vulnerable code
@Override
public <T> void deleteAll(Class<T> type) {
ClassInfo classInfo = session.metaData().classInfo(type.getName());
if (classInfo != null) {
String url = session.ensureTransaction().url();
ParameterisedStatement request = getDeleteStatementsBasedOnType(type).deleteByType(session.entityType(classInfo.name()));
try (Neo4jResponse<String> response = session.requestHandler().execute(request, url)) {
session.context().clear(type);
}
} else {
session.info(type.getName() + " is not a persistable class");
}
}
#location 5
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private String newTransactionUrl() {
String url = transactionEndpoint(driverConfig.getURI());
LOGGER.debug( "Thread: {}, POST {}", Thread.currentThread().getId(), url );
HttpPost request = new HttpPost(url);
try (CloseableHttpResponse response = executeHttpRequest(request)) {
Header location = response.getHeaders("Location")[0];
return location.getValue();
} catch (IOException ioe) {
throw new HttpRequestException(request, ioe);
}
}
|
#vulnerable code
private String newTransactionUrl() {
String url = transactionEndpoint(driverConfig.getURI());
LOGGER.debug( "Thread {}: POST {}", Thread.currentThread().getId(), url );
try (CloseableHttpResponse response = executeHttpRequest(new HttpPost(url))) {
Header location = response.getHeaders("Location")[0];
response.close();
return location.getValue();
} catch (Exception e) {
throw new ResultProcessingException("Could not obtain new Transaction: ", e);
}
}
#location 7
#vulnerability type NULL_DEREFERENCE
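A hedged sketch of the same POST with the JDK's java.net.http client, where a missing Location header surfaces as a clear exception rather than an index or null failure on getHeaders(...)[0]; the endpoint string is a placeholder.

import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class NewTransactionSketch {
    // firstValue() returns an Optional, so the absent-header case is explicit.
    static String newTransactionUrl(String endpoint) throws IOException, InterruptedException {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(endpoint))                       // e.g. ".../db/data/transaction" (placeholder)
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        return response.headers()
                .firstValue("Location")
                .orElseThrow(() -> new IOException("transaction endpoint returned no Location header"));
    }
}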
|