instruction
stringclasses 1
value | output
stringlengths 64
69.4k
| input
stringlengths 205
32.4k
|
---|---|---|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public MulticastMessageChannel createUdpServer(final InetSocketAddress bindAddress, final ReadChannelThread readThread, final WriteChannelThread writeThread, final ChannelListener<? super MulticastMessageChannel> bindListener, final OptionMap optionMap) throws IOException {
if (optionMap.get(Options.MULTICAST, false)) {
final MulticastSocket socket = new MulticastSocket(bindAddress);
final BioMulticastUdpChannel channel = new BioMulticastUdpChannel(optionMap.get(Options.SEND_BUFFER, 8192), optionMap.get(Options.RECEIVE_BUFFER, 8192), socket);
channel.setReadThread(readThread);
channel.setWriteThread(writeThread);
channel.open();
//noinspection unchecked
ChannelListeners.invokeChannelListener(channel, bindListener);
return channel;
} else {
final DatagramChannel channel = DatagramChannel.open();
channel.configureBlocking(false);
channel.socket().bind(bindAddress);
final NioUdpChannel udpChannel = new NioUdpChannel(this, channel);
udpChannel.setReadThread(readThread);
udpChannel.setWriteThread(writeThread);
//noinspection unchecked
ChannelListeners.invokeChannelListener(udpChannel, bindListener);
return udpChannel;
}
}
|
#vulnerable code
public MulticastMessageChannel createUdpServer(final InetSocketAddress bindAddress, final ReadChannelThread readThread, final WriteChannelThread writeThread, final ChannelListener<? super MulticastMessageChannel> bindListener, final OptionMap optionMap) throws IOException {
if (optionMap.get(Options.MULTICAST, false)) {
return new BioMulticastUdpChannel(optionMap.get(Options.SEND_BUFFER, 8192), optionMap.get(Options.RECEIVE_BUFFER, 8192), new MulticastSocket());
} else {
final DatagramChannel channel = DatagramChannel.open();
channel.configureBlocking(false);
channel.socket().bind(bindAddress);
final NioUdpChannel udpChannel = new NioUdpChannel(this, channel);
udpChannel.setReadThread(readThread);
udpChannel.setWriteThread(writeThread);
//noinspection unchecked
ChannelListeners.invokeChannelListener(udpChannel, bindListener);
return udpChannel;
}
}
#location 13
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void start() throws IOException {
if (selectorThreadFactory == null) {
selectorThreadFactory = Executors.defaultThreadFactory();
}
if (executor == null) {
executor = IoUtils.directExecutor();
}
for (int i = 0; i < readSelectorThreads; i ++) {
readers.add(new NioSelectorRunnable());
}
for (int i = 0; i < writeSelectorThreads; i ++) {
writers.add(new NioSelectorRunnable());
}
for (int i = 0; i < connectionSelectorThreads; i ++) {
connectors.add(new NioSelectorRunnable());
}
for (NioSelectorRunnable runnable : readers) {
selectorThreadFactory.newThread(runnable).start();
}
for (NioSelectorRunnable runnable : writers) {
selectorThreadFactory.newThread(runnable).start();
}
for (NioSelectorRunnable runnable : connectors) {
selectorThreadFactory.newThread(runnable).start();
}
}
|
#vulnerable code
public void start() throws IOException {
if (selectorThreadFactory == null) {
selectorThreadFactory = Executors.defaultThreadFactory();
}
if (executor == null) {
executor = executorService = Executors.newCachedThreadPool();
}
for (int i = 0; i < readSelectorThreads; i ++) {
readers.add(new NioSelectorRunnable());
}
for (int i = 0; i < writeSelectorThreads; i ++) {
writers.add(new NioSelectorRunnable());
}
for (int i = 0; i < connectionSelectorThreads; i ++) {
connectors.add(new NioSelectorRunnable());
}
for (NioSelectorRunnable runnable : readers) {
selectorThreadFactory.newThread(runnable).start();
}
for (NioSelectorRunnable runnable : writers) {
selectorThreadFactory.newThread(runnable).start();
}
for (NioSelectorRunnable runnable : connectors) {
selectorThreadFactory.newThread(runnable).start();
}
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public NioSocketStreamConnection accept() throws IOException {
final WorkerThread current = WorkerThread.getCurrent();
if (current == null) {
return null;
}
final NioTcpServerHandle handle = handles[current.getNumber()];
if (! handle.getConnection()) {
return null;
}
final SocketChannel accepted;
boolean ok = false;
try {
accepted = channel.accept();
if (accepted != null) try {
final SocketAddress localAddress = accepted.getLocalAddress();
int hash;
if (localAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) localAddress;
hash = address.getAddress().hashCode() * 23 + address.getPort();
} else if (localAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) localAddress).getName().hashCode();
} else {
hash = localAddress.hashCode();
}
final SocketAddress remoteAddress = accepted.getRemoteAddress();
if (remoteAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) remoteAddress;
hash = (address.getAddress().hashCode() * 23 + address.getPort()) * 23 + hash;
} else if (remoteAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) remoteAddress).getName().hashCode() * 23 + hash;
} else {
hash = localAddress.hashCode() * 23 + hash;
}
accepted.configureBlocking(false);
final Socket socket = accepted.socket();
socket.setKeepAlive(keepAlive != 0);
socket.setOOBInline(oobInline != 0);
socket.setTcpNoDelay(tcpNoDelay != 0);
final int sendBuffer = this.sendBuffer;
if (sendBuffer > 0) socket.setSendBufferSize(sendBuffer);
final WorkerThread ioThread = worker.getIoThread(hash);
final SelectionKey selectionKey = ioThread.registerChannel(accepted);
final NioSocketStreamConnection newConnection = new NioSocketStreamConnection(ioThread, selectionKey, handle);
newConnection.setOption(Options.READ_TIMEOUT, Integer.valueOf(readTimeout));
newConnection.setOption(Options.WRITE_TIMEOUT, Integer.valueOf(writeTimeout));
ok = true;
return newConnection;
} finally {
if (! ok) safeClose(accepted);
}
} catch (IOException e) {
return null;
} finally {
if (! ok) {
handle.freeConnection();
}
}
// by contract, only a resume will do
return null;
}
|
#vulnerable code
public NioSocketStreamConnection accept() throws IOException {
final WorkerThread current = WorkerThread.getCurrent();
final NioTcpServerHandle handle = handles[current.getNumber()];
if (! handle.getConnection()) {
return null;
}
final SocketChannel accepted;
boolean ok = false;
try {
accepted = channel.accept();
if (accepted != null) try {
final SocketAddress localAddress = accepted.getLocalAddress();
int hash;
if (localAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) localAddress;
hash = address.getAddress().hashCode() * 23 + address.getPort();
} else if (localAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) localAddress).getName().hashCode();
} else {
hash = localAddress.hashCode();
}
final SocketAddress remoteAddress = accepted.getRemoteAddress();
if (remoteAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) remoteAddress;
hash = (address.getAddress().hashCode() * 23 + address.getPort()) * 23 + hash;
} else if (remoteAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) remoteAddress).getName().hashCode() * 23 + hash;
} else {
hash = localAddress.hashCode() * 23 + hash;
}
accepted.configureBlocking(false);
final Socket socket = accepted.socket();
socket.setKeepAlive(keepAlive != 0);
socket.setOOBInline(oobInline != 0);
socket.setTcpNoDelay(tcpNoDelay != 0);
final int sendBuffer = this.sendBuffer;
if (sendBuffer > 0) socket.setSendBufferSize(sendBuffer);
final WorkerThread ioThread = worker.getIoThread(hash);
final SelectionKey selectionKey = ioThread.registerChannel(accepted);
final NioSocketStreamConnection newConnection = new NioSocketStreamConnection(ioThread, selectionKey, handle);
newConnection.setOption(Options.READ_TIMEOUT, Integer.valueOf(readTimeout));
newConnection.setOption(Options.WRITE_TIMEOUT, Integer.valueOf(writeTimeout));
ok = true;
return newConnection;
} finally {
if (! ok) safeClose(accepted);
}
} catch (IOException e) {
return null;
} finally {
if (! ok) {
handle.freeConnection();
}
}
// by contract, only a resume will do
return null;
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void stop() throws IOException {
final List<Channel> channels;
synchronized (managedChannelSet) {
channels = new ArrayList<Channel>(managedChannelSet);
managedChannelSet.clear();
}
for (Channel channel : channels) {
IoUtils.safeClose(channel);
}
for (NioSelectorRunnable runnable : readers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : writers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : connectors) {
runnable.shutdown();
}
readers.clear();
writers.clear();
connectors.clear();
}
|
#vulnerable code
public void stop() throws IOException {
final List<Channel> channels;
synchronized (managedChannelSet) {
channels = new ArrayList<Channel>(managedChannelSet);
managedChannelSet.clear();
}
for (Channel channel : channels) {
IoUtils.safeClose(channel);
}
for (NioSelectorRunnable runnable : readers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : writers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : connectors) {
runnable.shutdown();
}
readers.clear();
writers.clear();
connectors.clear();
if (executorService != null) {
try {
AccessController.doPrivileged(new PrivilegedAction<Void>() {
public Void run() {
executorService.shutdown();
return null;
}
});
} catch (Throwable t) {
log.trace(t, "Failed to shut down executor service");
} finally {
executorService = null;
}
}
}
#location 22
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<Integer> categories = LabelUtil.createTargetCategories(numClasses);
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
int numClasses = classificationModel.numClasses();
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
SchemaUtil.checkSize(numClasses, categoricalLabel);
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1){
SchemaUtil.checkSize(numFeatures, features);
}
}
Schema result = new Schema(label, features);
checkSchema(result);
return result;
}
|
#vulnerable code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<String> categories = new ArrayList<>();
for(int i = 0; i < numClasses; i++){
categories.add(String.valueOf(i));
}
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
int numClasses = classificationModel.numClasses();
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
SchemaUtil.checkSize(numClasses, categoricalLabel);
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1){
SchemaUtil.checkSize(numFeatures, features);
}
}
Schema result = new Schema(label, features);
SchemaUtil.checkSchema(result);
return result;
}
#location 81
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static
private MapValues createMapValues(FieldName name, Object identifier, List<Feature> features, List<Double> coefficients){
ListIterator<Feature> featureIt = features.listIterator();
ListIterator<Double> coefficientIt = coefficients.listIterator();
PMMLEncoder encoder = null;
List<Object> inputValues = new ArrayList<>();
List<Double> outputValues = new ArrayList<>();
while(featureIt.hasNext()){
Feature feature = featureIt.next();
Double coefficient = coefficientIt.next();
if(!(feature instanceof BinaryFeature)){
continue;
}
BinaryFeature binaryFeature = (BinaryFeature)feature;
if(!(name).equals(binaryFeature.getName())){
continue;
}
featureIt.remove();
coefficientIt.remove();
if(encoder == null){
encoder = binaryFeature.getEncoder();
}
inputValues.add(binaryFeature.getValue());
outputValues.add(coefficient);
}
MapValues mapValues = PMMLUtil.createMapValues(name, inputValues, outputValues)
.setDefaultValue(0d)
.setDataType(DataType.DOUBLE);
DerivedField derivedField = encoder.createDerivedField(FieldName.create("lookup(" + name.getValue() + (identifier != null ? (", " + identifier) : "") + ")"), OpType.CONTINUOUS, DataType.DOUBLE, mapValues);
featureIt.add(new ContinuousFeature(encoder, derivedField));
coefficientIt.add(1d);
return mapValues;
}
|
#vulnerable code
static
private MapValues createMapValues(FieldName name, Object identifier, List<Feature> features, List<Double> coefficients){
ListIterator<Feature> featureIt = features.listIterator();
ListIterator<Double> coefficientIt = coefficients.listIterator();
PMMLEncoder encoder = null;
List<Object> inputValues = new ArrayList<>();
List<Double> outputValues = new ArrayList<>();
while(featureIt.hasNext()){
Feature feature = featureIt.next();
Double coefficient = coefficientIt.next();
if(!(feature instanceof BinaryFeature)){
continue;
}
BinaryFeature binaryFeature = (BinaryFeature)feature;
if(!(name).equals(binaryFeature.getName())){
continue;
}
featureIt.remove();
coefficientIt.remove();
if(encoder == null){
encoder = binaryFeature.getEncoder();
}
inputValues.add(binaryFeature.getValue());
outputValues.add(coefficient);
}
MapValues mapValues = PMMLUtil.createMapValues(name, inputValues, outputValues)
.setDefaultValue(0d);
DerivedField derivedField = encoder.createDerivedField(FieldName.create("lookup(" + name.getValue() + (identifier != null ? (", " + identifier) : "") + ")"), OpType.CONTINUOUS, DataType.DOUBLE, mapValues);
featureIt.add(new ContinuousFeature(encoder, derivedField));
coefficientIt.add(1d);
return mapValues;
}
#location 38
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<String> categories = new ArrayList<>();
for(int i = 0; i < numClasses; i++){
categories.add(String.valueOf(i));
}
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
int numClasses = classificationModel.numClasses();
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
SchemaUtil.checkSize(numClasses, categoricalLabel);
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1){
SchemaUtil.checkSize(numFeatures, features);
}
}
Schema result = new Schema(label, features);
return result;
}
|
#vulnerable code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<String> categories = new ArrayList<>();
for(int i = 0; i < numClasses; i++){
categories.add(String.valueOf(i));
}
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
int numClasses = classificationModel.numClasses();
if(numClasses != categoricalLabel.size()){
throw new IllegalArgumentException("Expected " + numClasses + " target categories, got " + categoricalLabel.size() + " target categories");
}
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1 && features.size() != numFeatures){
throw new IllegalArgumentException("Expected " + numFeatures + " features, got " + features.size() + " features");
}
}
Schema result = new Schema(label, features);
return result;
}
#location 80
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
SortedMap<Long, Long> getOffsets(MovieFragmentBox moof, long trackId) {
isoBufferWrapper = moof.getIsoFile().getOriginalIso();
SortedMap<Long, Long> offsets2Sizes = new TreeMap<Long, Long>();
List<TrackFragmentBox> traf = moof.getBoxes(TrackFragmentBox.class);
for (TrackFragmentBox trackFragmentBox : traf) {
if (trackFragmentBox.getTrackFragmentHeaderBox().getTrackId() == trackId) {
long baseDataOffset;
if (trackFragmentBox.getTrackFragmentHeaderBox().hasBaseDataOffset()) {
baseDataOffset = trackFragmentBox.getTrackFragmentHeaderBox().getBaseDataOffset();
} else {
baseDataOffset = moof.getOffset();
}
for (TrackRunBox trun: trackFragmentBox.getBoxes(TrackRunBox.class)) {
long sampleBaseOffset = baseDataOffset + trun.getDataOffset();
long[] sampleOffsets = trun.getSampleOffsets();
long[] sampleSizes = trun.getSampleSizes();
for (int i = 0; i < sampleSizes.length; i++) {
offsets2Sizes.put(sampleOffsets[i] + sampleBaseOffset, sampleSizes[i]);
}
}
}
}
return offsets2Sizes;
}
|
#vulnerable code
SortedMap<Long, Long> getOffsets(MovieFragmentBox moof, long trackId) {
isoBufferWrapper = moof.getIsoFile().getOriginalIso();
SortedMap<Long, Long> offsets2Sizes = new TreeMap<Long, Long>();
List<TrackFragmentBox> traf = moof.getBoxes(TrackFragmentBox.class);
assert traf.size() == 1 : "I cannot deal with movie fragments containing more than one track fragment";
for (TrackFragmentBox trackFragmentBox : traf) {
if (trackFragmentBox.getTrackFragmentHeaderBox().getTrackId() == trackId) {
long baseDataOffset;
if (trackFragmentBox.getTrackFragmentHeaderBox().hasBaseDataOffset()) {
baseDataOffset = trackFragmentBox.getTrackFragmentHeaderBox().getBaseDataOffset();
} else {
baseDataOffset = moof.getOffset();
}
TrackRunBox trun = trackFragmentBox.getTrackRunBox();
long sampleBaseOffset = baseDataOffset + trun.getDataOffset();
long[] sampleOffsets = trun.getSampleOffsets();
long[] sampleSizes = trun.getSampleSizes();
for (int i = 0; i < sampleSizes.length; i++) {
offsets2Sizes.put(sampleOffsets[i] + sampleBaseOffset, sampleSizes[i]);
}
}
}
return offsets2Sizes;
}
#location 15
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void testRoundTrip_1(String resource) throws Exception {
File originalFile = File.createTempFile("pdcf", "original");
FileOutputStream fos = new FileOutputStream(originalFile);
byte[] content = read(getClass().getResourceAsStream(resource));
fos.write(content);
fos.close();
IsoFile isoFile = new IsoFile(InputStreamIsoBufferHelper.get(getClass().getResourceAsStream(resource), 20000));
isoFile.parse();
Walk.through(isoFile);
isoFile.parseMdats();
isoFile.switchToAutomaticChunkOffsetBox();
isoFile.getBoxes(MediaDataBox.class)[0].getSample(0).toString();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
isoFile.write(baos);
new FileOutputStream("/home/sannies/a").write(baos.toByteArray());
ArrayAssert.assertEquals(content, baos.toByteArray());
}
|
#vulnerable code
public void testRoundTrip_1(String resource) throws Exception {
File originalFile = File.createTempFile("pdcf", "original");
FileOutputStream fos = new FileOutputStream(originalFile);
byte[] content = read(getClass().getResourceAsStream(resource));
fos.write(content);
fos.close();
IsoFile isoFile = new IsoFile(InputStreamIsoBufferHelper.get(getClass().getResourceAsStream(resource), 20000));
isoFile.parse();
Walk.through(isoFile);
isoFile.parseMdats();
// isoFile.switchToAutomaticChunkOffsetBox();
// isoFile.getBoxes(MediaDataBox.class)[0].getSample(0).toString();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
isoFile.write(baos);
new FileOutputStream("/home/sannies/a").write(baos.toByteArray());
ArrayAssert.assertEquals(content, baos.toByteArray());
}
#location 18
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void useLatestReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
getLog().info( "Ignoring reactor dependency: " + toString( dep ) );
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
Artifact artifact = this.toArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
getLog().debug( "Looking for newer versions of " + toString( dep ) );
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
ArtifactVersion[] newer = versions.getNewerVersions( version, false );
if ( newer.length > 0 )
{
String newVersion = newer[newer.length - 1].toString();
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newVersion ) )
{
getLog().info( "Updated " + toString( dep ) + " to version " + newVersion );
}
}
}
}
}
|
#vulnerable code
private void useLatestReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
Artifact artifact = this.findArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
try
{
ArtifactVersion[] newer = versions.getNewerVersions( artifact.getSelectedVersion(), false );
if ( newer.length > 0 )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newer[newer.length - 1].toString() ) )
{
getLog().debug(
"Version set to " + newer[newer.length - 1].toString() + " for dependnecy: " + dep );
}
}
}
catch ( OverConstrainedVersionException e )
{
getLog().warn( "This should never happen as your build should not work at all if this is thrown",
e );
}
}
}
}
#location 25
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void execute()
throws MojoExecutionException, MojoFailureException
{
Set childModules = PomHelper.getAllChildModules( getProject(), getLog() );
PomHelper.removeMissingChildModules( getLog(), getProject(), childModules );
Iterator i = childModules.iterator();
MojoExecutionException pbe = null;
while ( i.hasNext() )
{
String modulePath = (String) i.next();
File moduleDir = new File( getProject().getBasedir(), modulePath );
File moduleProjectFile;
if ( moduleDir.isDirectory() )
{
moduleProjectFile = new File( moduleDir, "pom.xml" );
}
else
{
// i don't think this should ever happen... but just in case
// the module references the file-name
moduleProjectFile = moduleDir;
}
try
{
// the aim of this goal is to fix problems when the project cannot be parsed by Maven
// so we have to parse the file by hand!
StringBuffer childPom = readFile( moduleProjectFile );
ModifiedPomXMLEventReader pom = newModifiedPomXER( childPom );
Artifact parent = PomHelper.getProjectParent( pom, getHelper() );
if ( parent == null )
{
getLog().info( "Module: " + modulePath + " does not have a parent" );
}
else if ( !getProject().getGroupId().equals( parent.getGroupId() )
|| !getProject().getArtifactId().equals( parent.getArtifactId() ) )
{
getLog().info( "Module: " + modulePath + " does not use "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + " as its parent" );
}
else if ( getProject().getVersion().equals( parent.getVersion() ) )
{
getLog().info( "Module: " + modulePath + " parent is "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":"
+ getProject().getVersion() );
}
else
{
getLog().info( "Module: " + modulePath + " parent was "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":" + parent.getVersion()
+ ", now " + ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":"
+ getProject().getVersion() );
process( moduleProjectFile );
}
}
catch ( XMLStreamException e )
{
getLog().debug( "Could not parse " + moduleProjectFile.getPath(), e );
if ( pbe == null )
{
// save this until we get to the end.
pbe = new MojoExecutionException( "Could not parse " + moduleProjectFile.getPath(), e );
}
}
catch ( IOException e )
{
getLog().debug( "Could not parse " + moduleProjectFile.getPath(), e );
if ( pbe == null )
{
// save this until we get to the end.
pbe = new MojoExecutionException( "Could not parse " + moduleProjectFile.getPath(), e );
}
}
}
if ( pbe != null )
{
// ok, now throw the first one to blow up.
throw pbe;
}
}
|
#vulnerable code
public void execute()
throws MojoExecutionException, MojoFailureException
{
Set childModules = getAllChildModules( getProject() );
removeMissingChildModules( getProject(), childModules );
Iterator i = childModules.iterator();
MojoExecutionException pbe = null;
while ( i.hasNext() )
{
String modulePath = (String) i.next();
File moduleDir = new File( getProject().getBasedir(), modulePath );
File moduleProjectFile;
if ( moduleDir.isDirectory() )
{
moduleProjectFile = new File( moduleDir, "pom.xml" );
}
else
{
// i don't think this should ever happen... but just in case
// the module references the file-name
moduleProjectFile = moduleDir;
}
try
{
// the aim of this goal is to fix problems when the project cannot be parsed by Maven
// so we have to parse the file by hand!
StringBuffer childPom = readFile( moduleProjectFile );
ModifiedPomXMLEventReader pom = newModifiedPomXER( childPom );
Stack stack = new Stack();
String path = "";
String groupId = null;
String artifactId = null;
String version = null;
Pattern pattern = Pattern.compile( "/project/parent/(groupId|artifactId|version)" );
while ( pom.hasNext() )
{
XMLEvent event = pom.nextEvent();
if ( event.isStartDocument() )
{
path = "";
stack.clear();
}
else if ( event.isStartElement() )
{
stack.push( path );
path = path + "/" + event.asStartElement().getName().getLocalPart();
if ( pattern.matcher( path ).matches() )
{
String text = pom.getElementText().trim();
if ( path.endsWith( "groupId" ) )
{
groupId = text;
}
else if ( path.endsWith( "artifactId" ) )
{
artifactId = text;
}
else if ( path.endsWith( "version" ) )
{
version = text;
}
path = (String) stack.pop();
}
}
else if ( event.isEndElement() )
{
if ( "/project/parent".equals( path ) )
{
getLog().info( "Module: " + modulePath );
if ( getProject().getGroupId().equals( groupId ) && getProject().getArtifactId().equals(
artifactId ) )
{
if ( getProject().getVersion().equals( version ) )
{
getLog().info( " Parent is "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":"
+ getProject().getVersion() );
}
else
{
getLog().info( " Parent was "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":" + version
+ ", now " + ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":"
+ getProject().getVersion() );
process( moduleProjectFile );
}
}
else
{
getLog().info( " does not use "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + " as its parent" );
}
}
path = (String) stack.pop();
}
}
}
catch ( XMLStreamException e )
{
getLog().debug( "Could not parse " + moduleProjectFile.getPath(), e );
if ( pbe == null )
{
// save this until we get to the end.
pbe = new MojoExecutionException( "Could not parse " + moduleProjectFile.getPath(), e );
}
}
catch ( IOException e )
{
getLog().debug( "Could not parse " + moduleProjectFile.getPath(), e );
if ( pbe == null )
{
// save this until we get to the end.
pbe = new MojoExecutionException( "Could not parse " + moduleProjectFile.getPath(), e );
}
}
}
if ( pbe != null )
{
// ok, now throw the first one to blow up.
throw pbe;
}
}
#location 47
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public boolean isDependencyUpdateAvailable()
{
for ( Iterator i = dependencyVersions.values().iterator(); i.hasNext(); )
{
ArtifactVersions versions = (ArtifactVersions) i.next();
ArtifactVersion[] dependencyUpdates = versions.getAllUpdates( UpdateScope.ANY, includeSnapshots );
if ( dependencyUpdates != null && dependencyUpdates.length > 0 )
{
return true;
}
}
return false;
}
|
#vulnerable code
public boolean isDependencyUpdateAvailable()
{
for ( Iterator i = dependencyVersions.values().iterator(); i.hasNext(); )
{
ArtifactVersions versions = (ArtifactVersions) i.next();
if ( versions.getAllUpdates( UpdateScope.ANY, includeSnapshots ).length > 0 )
{
return true;
}
}
return false;
}
#location 6
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void useNextReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
getLog().info( "Ignoring reactor dependency: " + toString( dep ) );
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
getLog().debug( "Looking for newer versions of " + toString( dep ) );
Artifact artifact = this.toArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
ArtifactVersion[] newer = versions.getNewerVersions( version, false );
if ( newer.length > 0 )
{
String newVersion = newer[0].toString();
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newVersion ) )
{
getLog().info( "Updated " + toString( dep ) + " to version " + newVersion );
}
}
}
}
}
|
#vulnerable code
private void useNextReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
Artifact artifact = this.findArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
try
{
ArtifactVersion[] newer = versions.getNewerVersions( artifact.getSelectedVersion(), false );
if ( newer.length > 0 )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newer[0].toString() ) )
{
getLog().debug( "Version set to " + newer[0].toString() + " for dependnecy: " + dep );
}
}
}
catch ( OverConstrainedVersionException e )
{
getLog().warn( "This should never happen as your build should not work at all if this is thrown",
e );
}
}
}
}
#location 25
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static boolean setProjectVersion( final ModifiedPomXMLEventReader pom, final String value )
throws XMLStreamException
{
return setProjectValue( pom, "/project/version", value );
}
|
#vulnerable code
public static boolean setProjectVersion( final ModifiedPomXMLEventReader pom, final String value )
throws XMLStreamException
{
Stack<String> stack = new Stack<String>();
String path = "";
final Pattern matchScopeRegex;
boolean madeReplacement = false;
matchScopeRegex = Pattern.compile( "/project/version" );
pom.rewind();
while ( pom.hasNext() )
{
XMLEvent event = pom.nextEvent();
if ( event.isStartElement() )
{
stack.push( path );
path = path + "/" + event.asStartElement().getName().getLocalPart();
if ( matchScopeRegex.matcher( path ).matches() )
{
pom.mark( 0 );
}
}
if ( event.isEndElement() )
{
if ( matchScopeRegex.matcher( path ).matches() )
{
pom.mark( 1 );
if ( pom.hasMark( 0 ) && pom.hasMark( 1 ) )
{
pom.replaceBetween( 0, 1, value );
madeReplacement = true;
}
pom.clearMark( 0 );
pom.clearMark( 1 );
}
path = stack.pop();
}
}
return madeReplacement;
}
#location 15
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected void renderDependencySummaryTableRow( Dependency dependency, ArtifactVersions details,
boolean includeScope, boolean includeClassifier,
boolean includeType )
{
sink.tableRow();
sink.tableCell();
ArtifactVersion[] allUpdates = details.getAllUpdates( UpdateScope.ANY );
if ( allUpdates == null || allUpdates.length == 0 )
{
renderSuccessIcon();
}
else
{
renderWarningIcon();
}
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getGroupId() );
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getArtifactId() );
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getVersion() );
sink.tableCell_();
if ( includeScope )
{
sink.tableCell();
sink.text( dependency.getScope() );
sink.tableCell_();
}
if ( includeClassifier )
{
sink.tableCell();
sink.text( dependency.getClassifier() );
sink.tableCell_();
}
if ( includeType )
{
sink.tableCell();
sink.text( dependency.getType() );
sink.tableCell_();
}
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.INCREMENTAL ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.INCREMENTAL ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.MINOR ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.MINOR ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.MAJOR ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.MAJOR ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableRow_();
}
|
#vulnerable code
protected void renderDependencySummaryTableRow( Dependency dependency, ArtifactVersions details,
boolean includeScope, boolean includeClassifier,
boolean includeType )
{
sink.tableRow();
sink.tableCell();
if ( details.getAllUpdates( UpdateScope.ANY ).length == 0 )
{
renderSuccessIcon();
}
else
{
renderWarningIcon();
}
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getGroupId() );
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getArtifactId() );
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getVersion() );
sink.tableCell_();
if ( includeScope )
{
sink.tableCell();
sink.text( dependency.getScope() );
sink.tableCell_();
}
if ( includeClassifier )
{
sink.tableCell();
sink.text( dependency.getClassifier() );
sink.tableCell_();
}
if ( includeType )
{
sink.tableCell();
sink.text( dependency.getType() );
sink.tableCell_();
}
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.INCREMENTAL ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.INCREMENTAL ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.MINOR ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.MINOR ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.MAJOR ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.MAJOR ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableRow_();
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected final void writeFile( File outFile, StringBuffer input )
throws IOException
{
Writer writer = WriterFactory.newXmlWriter( outFile );
try
{
IOUtil.copy( input.toString(), writer );
}
finally
{
IOUtil.close( writer );
}
}
|
#vulnerable code
protected final void writeFile( File outFile, StringBuffer input )
throws IOException
{
OutputStream out = new BufferedOutputStream( new FileOutputStream( outFile ) );
out.write( input.toString().getBytes( PomHelper.POM_ENCODING ) );
out.close();
}
#location 7
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected void renderDependencyDetailTable( Dependency dependency, ArtifactVersions details, boolean includeScope,
boolean includeClassifier, boolean includeType )
{
final String cellWidth = "80%";
final String headerWidth = "20%";
sink.table();
sink.tableRows( new int[]{Parser.JUSTIFY_RIGHT, Parser.JUSTIFY_LEFT}, false );
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.status" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
ArtifactVersion[] versions = details.getAllUpdates( UpdateScope.ANY );
if ( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.otherUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.INCREMENTAL ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.incrementalUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.MINOR ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.minorUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.MAJOR ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.majorUpdatesAvailable" ) );
}
else
{
renderSuccessIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.noUpdatesAvailable" ) );
}
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.groupId" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getGroupId() );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.artifactId" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getArtifactId() );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.currentVersion" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getVersion() );
sink.tableCell_();
sink.tableRow_();
if ( includeScope )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.scope" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getScope() );
sink.tableCell_();
sink.tableRow_();
}
if ( includeClassifier )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.classifier" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getClassifier() );
sink.tableCell_();
sink.tableRow_();
}
if ( includeType )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.type" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getType() );
sink.tableCell_();
sink.tableRow_();
}
if ( versions != null && versions.length > 0 )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.updateVersions" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
for ( int i = 0; i < versions.length; i++ )
{
if ( i > 0 )
{
sink.lineBreak();
}
boolean bold = equals( versions[i], details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.INCREMENTAL ) )
|| equals( versions[i], details.getNewestUpdate( UpdateScope.INCREMENTAL ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.MINOR ) )
|| equals( versions[i], details.getNewestUpdate( UpdateScope.MINOR ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.MAJOR ) ) || equals( versions[i],
details.getNewestUpdate(
UpdateScope.MAJOR ) );
if ( bold )
{
safeBold();
}
sink.text( versions[i].toString() );
if ( bold )
{
safeBold_();
sink.nonBreakingSpace();
safeItalic();
if ( equals( versions[i], details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) ) )
{
sink.text( getText( "report.nextVersion" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.INCREMENTAL ) ) )
{
sink.text( getText( "report.nextIncremental" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.INCREMENTAL ) ) )
{
sink.text( getText( "report.latestIncremental" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.MINOR ) ) )
{
sink.text( getText( "report.nextMinor" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.MINOR ) ) )
{
sink.text( getText( "report.latestMinor" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.MAJOR ) ) )
{
sink.text( getText( "report.nextMajor" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.MAJOR ) ) )
{
sink.text( getText( "report.latestMajor" ) );
}
safeItalic_();
}
}
sink.tableCell_();
sink.tableRow_();
}
sink.tableRows_();
sink.table_();
}
|
#vulnerable code
protected void renderDependencyDetailTable( Dependency dependency, ArtifactVersions details, boolean includeScope,
boolean includeClassifier, boolean includeType )
{
final String cellWidth = "80%";
final String headerWidth = "20%";
sink.table();
sink.tableRows( new int[]{Parser.JUSTIFY_RIGHT, Parser.JUSTIFY_LEFT}, false );
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.status" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
ArtifactVersion[] versions = details.getAllUpdates( UpdateScope.ANY );
if ( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.otherUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.INCREMENTAL ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.incrementalUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.MINOR ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.minorUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.MAJOR ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.majorUpdatesAvailable" ) );
}
else
{
renderSuccessIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.noUpdatesAvailable" ) );
}
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.groupId" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getGroupId() );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.artifactId" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getArtifactId() );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.currentVersion" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getVersion() );
sink.tableCell_();
sink.tableRow_();
if ( includeScope )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.scope" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getScope() );
sink.tableCell_();
sink.tableRow_();
}
if ( includeClassifier )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.classifier" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getClassifier() );
sink.tableCell_();
sink.tableRow_();
}
if ( includeType )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.type" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getType() );
sink.tableCell_();
sink.tableRow_();
}
if ( versions.length > 0 )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.updateVersions" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
for ( int i = 0; i < versions.length; i++ )
{
if ( i > 0 )
{
sink.lineBreak();
}
boolean bold = equals( versions[i], details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.INCREMENTAL ) )
|| equals( versions[i], details.getNewestUpdate( UpdateScope.INCREMENTAL ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.MINOR ) )
|| equals( versions[i], details.getNewestUpdate( UpdateScope.MINOR ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.MAJOR ) ) || equals( versions[i],
details.getNewestUpdate(
UpdateScope.MAJOR ) );
if ( bold )
{
safeBold();
}
sink.text( versions[i].toString() );
if ( bold )
{
safeBold_();
sink.nonBreakingSpace();
safeItalic();
if ( equals( versions[i], details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) ) )
{
sink.text( getText( "report.nextVersion" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.INCREMENTAL ) ) )
{
sink.text( getText( "report.nextIncremental" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.INCREMENTAL ) ) )
{
sink.text( getText( "report.latestIncremental" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.MINOR ) ) )
{
sink.text( getText( "report.nextMinor" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.MINOR ) ) )
{
sink.text( getText( "report.latestMinor" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.MAJOR ) ) )
{
sink.text( getText( "report.nextMajor" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.MAJOR ) ) )
{
sink.text( getText( "report.latestMajor" ) );
}
safeItalic_();
}
}
sink.tableCell_();
sink.tableRow_();
}
sink.tableRows_();
sink.table_();
}
#location 103
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void runExperiment(MultilabelClassifier h, String options[]) throws Exception {
// Help
if(Utils.getOptionPos('h',options) >= 0) {
System.out.println("\nHelp requested");
Evaluation.printOptions(h.listOptions());
return;
}
h.setOptions(options);
//Load Instances
Instances allInstances = null;
String filename = null;
try {
filename = Utils.getOption('t', options);
allInstances = DataSource.read(filename);
} catch(Exception e) {
throw new Exception("[Error] Failed to Load Instances from file '" + filename + "'", e);
}
//Get the Options in the @relation name (in format 'dataset-name: <options>')
String doptions[] = null;
try {
doptions = MLUtils.getDatasetOptions(allInstances);
} catch(Exception e) {
throw new Exception("[Error] Failed to Get Options from @Relation Name", e);
}
//Concatenate the Options in the @relation name to the cmd line options
String full = "";
for(String s : options) {
if (s.length() > 0)
full += (s + " ");
}
for(String s : doptions) {
if (s.length() > 0)
full += (s + " ");
}
options = Utils.splitOptions(full);
//Set Options from the command line, any leftover options will most likely be used in the code that follows
try {
int c = (Utils.getOptionPos('C', options) >= 0) ? Integer.parseInt(Utils.getOption('C',options)) : Integer.parseInt(Utils.getOption('c',options));
// if negative, then invert ...
if ( c < 0) {
c = -c;
allInstances = MLUtils.switchAttributes(allInstances,c);
}
// end
allInstances.setClassIndex(c);
} catch(Exception e) {
System.err.println("[Error] Failed to Set Options from Command Line -- Check\n\t the spelling of the base classifier;\n \t that options are specified in the correct order (respective to the '--' divider); and\n\t that the class index is set properly.");
System.exit(1);
}
//Check for the essential -C option. If still nothing set, we can't continue
if(allInstances.classIndex() < 0)
throw new Exception("You must supply the number of labels either in the @Relation Name of the dataset or on the command line using the option: -C <num. labels>");
//Set Range
if(Utils.getOptionPos('p',options) >= 0) {
// Randomize
if(Utils.getOptionPos('R',options) >= 0) {
allInstances.randomize(new Random());
}
try {
String range = Utils.getOption('p',options);
System.out.println("Selecting Range "+range+"");
RemoveRange remove = new RemoveRange();
remove.setInstancesIndices(range);
remove.setInvertSelection(true);
remove.setInputFormat(allInstances);
allInstances = Filter.useFilter(allInstances, remove);
} catch(Exception e) {
System.out.println(""+e);
e.printStackTrace();
throw new Exception("Failed to Remove Range", e);
}
}
int seed = (Utils.getOptionPos('s',options) >= 0) ? Integer.parseInt(Utils.getOption('s',options)) : 0;
// Randomize (Instances)
if(Utils.getOptionPos('R',options) >= 0) {
allInstances.randomize(new Random(seed));
}
// Randomize (Method)
if (h instanceof Randomizable) {
((Randomizable)h).setSeed(seed + 1); // (@NOTE because previously we were using seed '1' as the default in BaggingML, we want to maintain reproducibility of older results with the same seed).
}
try {
Result r = null;
// Get Split
if(Utils.getOptionPos('x',options) >= 0) {
// CROSS-FOLD-VALIDATION
int numFolds = MLUtils.getIntegerOption(Utils.getOption('x',options),10); // default 10
r = new Result();
Result fold[] = Evaluation.cvModel(h,allInstances,numFolds,(Utils.getOptionPos('T',options) >= 0) ? Utils.getOption('T',options) : "c");
r.info = fold[0].info;
for(String v : fold[0].vals.keySet()) {
r.info.put(v,Result.getValues(v,fold));
}
HashMap<String,double[]> o = Result.getStats(fold);
for(String s : o.keySet()) {
double values[] = o.get(s);
r.info.put(s,Utils.doubleToString(Utils.mean(values),5,3)+" +/- "+Utils.doubleToString(Math.sqrt(Utils.variance(values)),5,3));
}
r.setInfo("Type","CV");
System.out.println(r.toString());
}
else {
// TRAIN/TEST SPLIT
int TRAIN = (int)(allInstances.numInstances() * 0.60), TEST;
if(Utils.getOptionPos("split-percentage",options) >= 0) {
double percentTrain = Double.parseDouble(Utils.getOption("split-percentage",options));
TRAIN = (int)Math.round((allInstances.numInstances() * (percentTrain/100.0)));
}
else if(Utils.getOptionPos("split-number",options) >= 0) {
TRAIN = Integer.parseInt(Utils.getOption("split-number",options));
}
TEST = allInstances.numInstances() - TRAIN;
Instances train = new Instances(allInstances,0,TRAIN);
train.setClassIndex(allInstances.classIndex());
Instances test = new Instances(allInstances,TRAIN,TEST);
test.setClassIndex(allInstances.classIndex());
// Invert the split?
if(Utils.getFlag('i',options)) { //boolean INVERT = Utils.getFlag('i',options);
//Get Debug/Verbosity/Output Level
Instances holder = test;
test = train;
train = holder;
}
// We're going to do parameter tuning
if(Utils.getOptionPos('u',options) >= 0) {
double percentageSplit = Double.parseDouble(Utils.getOption('u',options));
TRAIN = (int)(train.numInstances() * percentageSplit);
TEST = train.numInstances() - TRAIN;
train = new Instances(train,0,TRAIN);
test = new Instances(train,TRAIN,TEST);
}
if (h.getDebug()) System.out.println(":- Dataset -: "+MLUtils.getDatasetName(allInstances)+"\tL="+allInstances.classIndex()+"\tD(t:T)=("+train.numInstances()+":"+test.numInstances()+")\tLC(t:T)="+Utils.roundDouble(MLUtils.labelCardinality(train,allInstances.classIndex()),2)+":"+Utils.roundDouble(MLUtils.labelCardinality(test,allInstances.classIndex()),2)+")");
r = evaluateModel(h,train,test,(Utils.getOptionPos('T',options) >= 0) ? Utils.getOption('T',options) : "c");
r.output = Result.getStats(r);
System.out.println(r.toString());
}
// Save ranking data?
if (Utils.getOptionPos('f',options) >= 0) {
Result.writeResultToFile(r,Utils.getOption('f',options));
}
} catch(Exception e) {
e.printStackTrace();
System.exit(1);
}
System.exit(0);
}
|
#vulnerable code
public static void runExperiment(MultilabelClassifier h, String options[]) throws Exception {
// Help
if(Utils.getOptionPos('h',options) >= 0) {
System.out.println("\nHelp requested");
Evaluation.printOptions(h.listOptions());
return;
}
h.setOptions(options);
//Load Instances
Instances allInstances = null;
try {
String filename = Utils.getOption('t', options);
allInstances = new Instances(new BufferedReader(new FileReader(filename)));
} catch(IOException e) {
throw new Exception("[Error] Failed to Load Instances from file");
}
//Get the Options in the @relation name (in format 'dataset-name: <options>')
String doptions[] = null;
try {
doptions = MLUtils.getDatasetOptions(allInstances);
} catch(Exception e) {
throw new Exception("[Error] Failed to Get Options from @Relation Name");
}
//Concatenate the Options in the @relation name to the cmd line options
String full = "";
for(String s : options) {
if (s.length() > 0)
full += (s + " ");
}
for(String s : doptions) {
if (s.length() > 0)
full += (s + " ");
}
options = Utils.splitOptions(full);
//Set Options from the command line, any leftover options will most likely be used in the code that follows
try {
int c = (Utils.getOptionPos('C', options) >= 0) ? Integer.parseInt(Utils.getOption('C',options)) : Integer.parseInt(Utils.getOption('c',options));
// if negative, then invert ...
if ( c < 0) {
c = -c;
allInstances = MLUtils.switchAttributes(allInstances,c);
}
// end
allInstances.setClassIndex(c);
} catch(Exception e) {
System.err.println("[Error] Failed to Set Options from Command Line -- Check\n\t the spelling of the base classifier;\n \t that options are specified in the correct order (respective to the '--' divider); and\n\t that the class index is set properly.");
System.exit(1);
}
//Check for the essential -C option. If still nothing set, we can't continue
if(allInstances.classIndex() < 0)
throw new Exception("You must supply the number of labels either in the @Relation Name of the dataset or on the command line using the option: -C <num. labels>");
//Set Range
if(Utils.getOptionPos('p',options) >= 0) {
// Randomize
if(Utils.getOptionPos('R',options) >= 0) {
allInstances.randomize(new Random());
}
try {
String range = Utils.getOption('p',options);
System.out.println("Selecting Range "+range+"");
RemoveRange remove = new RemoveRange();
remove.setInstancesIndices(range);
remove.setInvertSelection(true);
remove.setInputFormat(allInstances);
allInstances = Filter.useFilter(allInstances, remove);
} catch(Exception e) {
System.out.println(""+e);
e.printStackTrace();
throw new Exception("Failed to Remove Range");
}
}
int seed = (Utils.getOptionPos('s',options) >= 0) ? Integer.parseInt(Utils.getOption('s',options)) : 0;
// Randomize (Instances)
if(Utils.getOptionPos('R',options) >= 0) {
allInstances.randomize(new Random(seed));
}
// Randomize (Method)
if (h instanceof Randomizable) {
((Randomizable)h).setSeed(seed + 1); // (@NOTE because previously we were using seed '1' as the default in BaggingML, we want to maintain reproducibility of older results with the same seed).
}
try {
Result r = null;
// Get Split
if(Utils.getOptionPos('x',options) >= 0) {
// CROSS-FOLD-VALIDATION
int numFolds = MLUtils.getIntegerOption(Utils.getOption('x',options),10); // default 10
r = new Result();
Result fold[] = Evaluation.cvModel(h,allInstances,numFolds,(Utils.getOptionPos('T',options) >= 0) ? Utils.getOption('T',options) : "c");
r.info = fold[0].info;
for(String v : fold[0].vals.keySet()) {
r.info.put(v,Result.getValues(v,fold));
}
HashMap<String,double[]> o = Result.getStats(fold);
for(String s : o.keySet()) {
double values[] = o.get(s);
r.info.put(s,Utils.doubleToString(Utils.mean(values),5,3)+" +/- "+Utils.doubleToString(Math.sqrt(Utils.variance(values)),5,3));
}
r.setInfo("Type","CV");
System.out.println(r.toString());
}
else {
// TRAIN/TEST SPLIT
int TRAIN = (int)(allInstances.numInstances() * 0.60), TEST;
if(Utils.getOptionPos("split-percentage",options) >= 0) {
double percentTrain = Double.parseDouble(Utils.getOption("split-percentage",options));
TRAIN = (int)Math.round((allInstances.numInstances() * (percentTrain/100.0)));
}
else if(Utils.getOptionPos("split-number",options) >= 0) {
TRAIN = Integer.parseInt(Utils.getOption("split-number",options));
}
TEST = allInstances.numInstances() - TRAIN;
Instances train = new Instances(allInstances,0,TRAIN);
train.setClassIndex(allInstances.classIndex());
Instances test = new Instances(allInstances,TRAIN,TEST);
test.setClassIndex(allInstances.classIndex());
// Invert the split?
if(Utils.getFlag('i',options)) { //boolean INVERT = Utils.getFlag('i',options);
//Get Debug/Verbosity/Output Level
Instances holder = test;
test = train;
train = holder;
}
// We're going to do parameter tuning
if(Utils.getOptionPos('u',options) >= 0) {
double percentageSplit = Double.parseDouble(Utils.getOption('u',options));
TRAIN = (int)(train.numInstances() * percentageSplit);
TEST = train.numInstances() - TRAIN;
train = new Instances(train,0,TRAIN);
test = new Instances(train,TRAIN,TEST);
}
if (h.getDebug()) System.out.println(":- Dataset -: "+MLUtils.getDatasetName(allInstances)+"\tL="+allInstances.classIndex()+"\tD(t:T)=("+train.numInstances()+":"+test.numInstances()+")\tLC(t:T)="+Utils.roundDouble(MLUtils.labelCardinality(train,allInstances.classIndex()),2)+":"+Utils.roundDouble(MLUtils.labelCardinality(test,allInstances.classIndex()),2)+")");
r = evaluateModel(h,train,test,(Utils.getOptionPos('T',options) >= 0) ? Utils.getOption('T',options) : "c");
r.output = Result.getStats(r);
System.out.println(r.toString());
}
// Save ranking data?
if (Utils.getOptionPos('f',options) >= 0) {
Result.writeResultToFile(r,Utils.getOption('f',options));
}
} catch(Exception e) {
e.printStackTrace();
System.exit(1);
}
System.exit(0);
}
#location 16
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public JsonParser createParser(File f) throws IOException {
IOContext ctxt = _createContext(f, true);
return _createParser(_decorate(new FileInputStream(f), ctxt), ctxt);
}
|
#vulnerable code
@Override
public JsonParser createParser(File f) throws IOException {
return _createParser(new FileInputStream(f), _createContext(f, true));
}
#location 3
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void run() {
CodeGenerator codegenerator = GeneratorFactory.getGenerator(CodeGenerator.class, "default");
if (codegenerator == null) {
LOGGER.warn("Not CodeGenerator found");
return;
}
CodegenConfigurator configurator = new CodegenConfigurator();
// add additional property
Optional.ofNullable(properties).ifPresent(properties ->
Arrays.stream(properties.split(",")).forEach(property -> {
String[] split = property.split("=");
if (split != null && split.length == 2) {
configurator.addAdditionalProperty(split[0], split[1]);
}
})
);
configurator.setOutputDir(output)
.setGroupId(groupId)
.setArtifactId(artifactId)
.setArtifactVersion(artifactVersion)
.setLibrary(programmingModel)
.setGeneratorName(framework)
.setApiPackage(apiPackage)
.setModelPackage(modelPackage);
configurator.addAdditionalProperty(ProjectMetaConstant.SERVICE_TYPE, serviceType);
if (isNotEmpty(specFile)) {
File contractFile = new File(specFile);
if (contractFile.isDirectory()) {
try {
Files.walkFileTree(Paths.get(contractFile.toURI()), new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
configurator.setInputSpec(file.toFile().getCanonicalPath())
.addAdditionalProperty("apiName", file.toFile().getName().split("\\.")[0]);
try {
codegenerator.configure(Collections.singletonMap("configurator", configurator));
codegenerator.generate();
} catch (RuntimeException e) {
throw new RuntimeException("Failed to generate code base on file " + file.toFile().getName());
}
return super.visitFile(file, attrs);
}
});
} catch (RuntimeException | IOException e) {
LOGGER.error(e.getMessage());
return;
}
} else {
configurator.setInputSpec(specFile).addAdditionalProperty("apiName", contractFile.getName().split("\\.")[0]);
codegenerator.configure(Collections.singletonMap("configurator", configurator));
codegenerator.generate();
}
LOGGER.info("Success to generate code, the directory is: {}", output);
}
}
|
#vulnerable code
@Override
public void run() {
CodegenConfigurator configurator = new CodegenConfigurator();
CodeGenerator codegenerator = GeneratorFactory.getGenerator(CodeGenerator.class, "default");
// add additional property
Optional.ofNullable(properties).ifPresent(properties ->
Arrays.stream(properties.split(",")).forEach(property -> {
String[] split = property.split("=");
if (split != null && split.length == 2) {
configurator.addAdditionalProperty(split[0], split[1]);
}
})
);
configurator.setOutputDir(output)
.setGroupId(groupId)
.setArtifactId(artifactId)
.setArtifactVersion(artifactVersion)
.setLibrary(programmingModel)
.setGeneratorName(framework)
.setApiPackage(apiPackage)
.setModelPackage(modelPackage);
configurator.addAdditionalProperty(ProjectMetaConstant.SERVICE_TYPE, serviceType);
if (isNotEmpty(specFile)) {
File contractFile = new File(specFile);
if (contractFile.isDirectory()) {
try {
Files.walkFileTree(Paths.get(contractFile.toURI()), new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
configurator.setInputSpec(file.toFile().getCanonicalPath())
.addAdditionalProperty("apiName", file.toFile().getName().split("\\.")[0]);
try {
codegenerator.configure(Collections.singletonMap("configurator", configurator));
codegenerator.generate();
} catch (RuntimeException e) {
throw new RuntimeException("Failed to generate code base on file " + file.toFile().getName());
}
return super.visitFile(file, attrs);
}
});
} catch (RuntimeException | IOException e) {
LOGGER.error(e.getMessage());
return;
}
} else {
configurator.setInputSpec(specFile);
codegenerator.configure(Collections.singletonMap("configurator", configurator));
codegenerator.generate();
}
LOGGER.info("Success to generate code, the directory is: {}", output);
}
}
#location 43
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public final void doHandle(HttpServletRequest baseRequest,
HttpServletRequest request, HttpServletResponse response,
InputStream is) throws IOException, S3Exception {
String method = request.getMethod();
String uri = request.getRequestURI();
if (!this.servicePath.isEmpty()) {
if (uri.length() > this.servicePath.length()) {
uri = uri.substring(this.servicePath.length());
}
}
logger.debug("request: {}", request);
String hostHeader = request.getHeader(HttpHeaders.HOST);
if (hostHeader != null && virtualHost.isPresent()) {
hostHeader = HostAndPort.fromString(hostHeader).getHostText();
String virtualHostSuffix = "." + virtualHost.get();
if (!hostHeader.equals(virtualHost.get())) {
if (hostHeader.endsWith(virtualHostSuffix)) {
String bucket = hostHeader.substring(0,
hostHeader.length() - virtualHostSuffix.length());
uri = "/" + bucket + uri;
} else {
String bucket = hostHeader.toLowerCase();
uri = "/" + bucket + uri;
}
}
}
boolean hasDateHeader = false;
boolean hasXAmzDateHeader = false;
for (String headerName : Collections.list(request.getHeaderNames())) {
for (String headerValue : Collections.list(request.getHeaders(
headerName))) {
logger.debug("header: {}: {}", headerName,
Strings.nullToEmpty(headerValue));
}
if (headerName.equalsIgnoreCase(HttpHeaders.DATE)) {
hasDateHeader = true;
} else if (headerName.equalsIgnoreCase("x-amz-date")) {
logger.debug("have the x-amz-date heaer {}", headerName);
// why x-amz-date name exist,but value is null?
if ("".equals(request.getHeader("x-amz-date")) ||
request.getHeader("x-amz-date") == null) {
logger.debug("have empty x-amz-date");
} else {
hasXAmzDateHeader = true;
}
}
}
boolean haveBothDateHeader = false;
if (hasDateHeader && hasXAmzDateHeader) {
haveBothDateHeader = true;
}
// when access information is not provided in request header,
// treat it as anonymous, return all public accessible information
if (!anonymousIdentity &&
(method.equals("GET") || method.equals("HEAD") ||
method.equals("POST")) &&
request.getHeader(HttpHeaders.AUTHORIZATION) == null &&
// v2 or /v4
request.getParameter("X-Amz-Algorithm") == null && // v4 query
request.getParameter("AWSAccessKeyId") == null && // v2 query
defaultBlobStore != null) {
doHandleAnonymous(request, response, is, uri, defaultBlobStore);
return;
}
// should according the AWSAccessKeyId= Signature or auth header nil
if (!anonymousIdentity && !hasDateHeader && !hasXAmzDateHeader &&
request.getParameter("X-Amz-Date") == null &&
request.getParameter("Expires") == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"AWS authentication requires a valid Date or" +
" x-amz-date header");
}
BlobStore blobStore;
String requestIdentity = null;
String headerAuthorization = request.getHeader(
HttpHeaders.AUTHORIZATION);
S3AuthorizationHeader authHeader = null;
boolean presignedUrl = false;
if (!anonymousIdentity) {
if (headerAuthorization == null) {
String algorithm = request.getParameter("X-Amz-Algorithm");
if (algorithm == null) { //v2 query
String identity = request.getParameter("AWSAccessKeyId");
String signature = request.getParameter("Signature");
if (identity == null || signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS " + identity + ":" + signature;
presignedUrl = true;
} else if (algorithm.equals("AWS4-HMAC-SHA256")) { //v4 query
String credential = request.getParameter(
"X-Amz-Credential");
String signedHeaders = request.getParameter(
"X-Amz-SignedHeaders");
String signature = request.getParameter(
"X-Amz-Signature");
if (credential == null || signedHeaders == null ||
signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS4-HMAC-SHA256" +
" Credential=" + credential +
", requestSignedHeaders=" + signedHeaders +
", Signature=" + signature;
presignedUrl = true;
}
}
try {
authHeader = new S3AuthorizationHeader(headerAuthorization);
//whether v2 or v4 (normal header and query)
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, iae);
}
requestIdentity = authHeader.identity;
}
long dateSkew = 0; //date for timeskew check
//v2 GET /s3proxy-1080747708/foo?AWSAccessKeyId=local-identity&Expires=
//1510322602&Signature=UTyfHY1b1Wgr5BFEn9dpPlWdtFE%3D)
//have no date
if (!anonymousIdentity) {
boolean haveDate = true;
AuthenticationType finalAuthType = null;
if (authHeader.authenticationType == AuthenticationType.AWS_V2 &&
(authenticationType == AuthenticationType.AWS_V2 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V2;
} else if (
authHeader.authenticationType == AuthenticationType.AWS_V4 &&
(authenticationType == AuthenticationType.AWS_V4 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V4;
} else if (authenticationType != AuthenticationType.NONE) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
if (hasXAmzDateHeader) { //format diff between v2 and v4
if (finalAuthType == AuthenticationType.AWS_V2) {
dateSkew = request.getDateHeader("x-amz-date");
dateSkew /= 1000;
//case sensetive?
} else if (finalAuthType == AuthenticationType.AWS_V4) {
logger.debug("into process v4 {}",
request.getHeader("x-amz-date"));
dateSkew = parseIso8601(request.getHeader("x-amz-date"));
}
} else if (request.getParameter("X-Amz-Date") != null) { // v4 query
String dateString = request.getParameter("X-Amz-Date");
dateSkew = parseIso8601(dateString);
} else if (hasDateHeader) {
try {
dateSkew = request.getDateHeader(HttpHeaders.DATE);
logger.debug("dateheader {}", dateSkew);
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED, iae);
}
dateSkew /= 1000;
logger.debug("dateheader {}", dateSkew);
} else {
haveDate = false;
}
logger.debug("dateSkew {}", dateSkew);
if (haveDate) {
isTimeSkewed(dateSkew);
}
}
String[] path = uri.split("/", 3);
for (int i = 0; i < path.length; i++) {
path[i] = URLDecoder.decode(path[i], "UTF-8");
}
Map.Entry<String, BlobStore> provider =
blobStoreLocator.locateBlobStore(
requestIdentity, path.length > 1 ? path[1] : null,
path.length > 2 ? path[2] : null);
if (anonymousIdentity) {
blobStore = provider.getValue();
String contentSha256 = request.getHeader("x-amz-content-sha256");
if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(contentSha256)) {
is = new ChunkedInputStream(is);
}
} else if (requestIdentity == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
} else {
if (provider == null) {
throw new S3Exception(S3ErrorCode.INVALID_ACCESS_KEY_ID);
}
String credential = provider.getKey();
blobStore = provider.getValue();
String expiresString = request.getParameter("Expires");
if (expiresString != null) { // v2 query
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
}
String dateString = request.getParameter("X-Amz-Date");
//from para v4 query
expiresString = request.getParameter("X-Amz-Expires");
if (dateString != null && expiresString != null) { //v4 query
long date = parseIso8601(dateString);
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= date + expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"Request has expired");
}
}
// The aim ?
switch (authHeader.authenticationType) {
case AWS_V2:
switch (authenticationType) {
case AWS_V2:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case AWS_V4:
switch (authenticationType) {
case AWS_V4:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case NONE:
break;
default:
throw new IllegalArgumentException("Unhandled type: " +
authHeader.authenticationType);
}
String expectedSignature = null;
// When presigned url is generated, it doesn't consider service path
String uriForSigning = presignedUrl ? uri : this.servicePath + uri;
if (authHeader.hmacAlgorithm == null) { //v2
expectedSignature = AwsSignature.createAuthorizationSignature(
request, uriForSigning, credential, presignedUrl,
haveBothDateHeader);
} else {
String contentSha256 = request.getHeader(
"x-amz-content-sha256");
try {
byte[] payload;
if (request.getParameter("X-Amz-Algorithm") != null) {
payload = new byte[0];
} else if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(
contentSha256)) {
payload = new byte[0];
is = new ChunkedInputStream(is);
} else if ("UNSIGNED-PAYLOAD".equals(contentSha256)) {
payload = new byte[0];
} else {
// buffer the entire stream to calculate digest
// why input stream read contentlength of header?
payload = ByteStreams.toByteArray(ByteStreams.limit(
is, v4MaxNonChunkedRequestSize + 1));
if (payload.length == v4MaxNonChunkedRequestSize + 1) {
throw new S3Exception(
S3ErrorCode.MAX_MESSAGE_LENGTH_EXCEEDED);
}
// maybe we should check this when signing,
// a lot of dup code with aws sign code.
MessageDigest md = MessageDigest.getInstance(
authHeader.hashAlgorithm);
byte[] hash = md.digest(payload);
if (!contentSha256.equals(
BaseEncoding.base16().lowerCase()
.encode(hash))) {
throw new S3Exception(
S3ErrorCode
.X_AMZ_CONTENT_S_H_A_256_MISMATCH);
}
is = new ByteArrayInputStream(payload);
}
expectedSignature = AwsSignature
.createAuthorizationSignatureV4(// v4 sign
baseRequest, authHeader, payload, uriForSigning,
credential);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, e);
}
}
if (!expectedSignature.equals(authHeader.signature)) {
logger.debug("fail to validate signature");
throw new S3Exception(S3ErrorCode.SIGNATURE_DOES_NOT_MATCH);
}
}
for (String parameter : Collections.list(
request.getParameterNames())) {
if (UNSUPPORTED_PARAMETERS.contains(parameter)) {
logger.error("Unknown parameters {} with URI {}",
parameter, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// emit NotImplemented for unknown x-amz- headers
for (String headerName : Collections.list(request.getHeaderNames())) {
if (ignoreUnknownHeaders) {
continue;
}
if (!headerName.startsWith("x-amz-")) {
continue;
}
if (headerName.startsWith("x-amz-meta-")) {
continue;
}
if (headerName.equals("x-amz-storage-class") &&
request.getHeader(headerName).equals("STANDARD")) {
continue;
}
if (!SUPPORTED_X_AMZ_HEADERS.contains(headerName.toLowerCase())) {
logger.error("Unknown header {} with URI {}",
headerName, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// Validate container name
if (!uri.equals("/") && !isValidContainer(path[1])) {
if (method.equals("PUT") &&
(path.length <= 2 || path[2].isEmpty()) &&
!("".equals(request.getParameter("acl")))) {
throw new S3Exception(S3ErrorCode.INVALID_BUCKET_NAME);
} else {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
}
String uploadId = request.getParameter("uploadId");
switch (method) {
case "DELETE":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerDelete(response, blobStore, path[1]);
return;
} else if (uploadId != null) {
handleAbortMultipartUpload(response, blobStore, path[1],
path[2], uploadId);
return;
} else {
handleBlobRemove(response, blobStore, path[1], path[2]);
return;
}
case "GET":
if (uri.equals("/")) {
handleContainerList(response, blobStore);
return;
} else if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleGetContainerAcl(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("location"))) {
handleContainerLocation(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleListMultipartUploads(request, response, blobStore,
path[1]);
return;
}
handleBlobList(request, response, blobStore, path[1]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleGetBlobAcl(response, blobStore, path[1],
path[2]);
return;
} else if (uploadId != null) {
handleListParts(request, response, blobStore, path[1],
path[2], uploadId);
return;
}
handleGetBlob(request, response, blobStore, path[1],
path[2]);
return;
}
case "HEAD":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerExists(blobStore, path[1]);
return;
} else {
handleBlobMetadata(request, response, blobStore, path[1],
path[2]);
return;
}
case "POST":
if ("".equals(request.getParameter("delete"))) {
handleMultiBlobRemove(response, is, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleInitiateMultipartUpload(request, response, blobStore,
path[1], path[2]);
return;
} else if (uploadId != null &&
request.getParameter("partNumber") == null) {
handleCompleteMultipartUpload(response, is, blobStore, path[1],
path[2], uploadId);
return;
}
break;
case "PUT":
if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleSetContainerAcl(request, response, is, blobStore,
path[1]);
return;
}
handleContainerCreate(request, response, is, blobStore,
path[1]);
return;
} else if (uploadId != null) {
if (request.getHeader("x-amz-copy-source") != null) {
handleCopyPart(request, response, blobStore, path[1],
path[2], uploadId);
} else {
handleUploadPart(request, response, is, blobStore, path[1],
path[2], uploadId);
}
return;
} else if (request.getHeader("x-amz-copy-source") != null) {
handleCopyBlob(request, response, is, blobStore, path[1],
path[2]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleSetBlobAcl(request, response, is, blobStore, path[1],
path[2]);
return;
}
handlePutBlob(request, response, is, blobStore, path[1],
path[2]);
return;
}
default:
break;
}
logger.error("Unknown method {} with URI {}",
method, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
|
#vulnerable code
public final void doHandle(HttpServletRequest baseRequest,
HttpServletRequest request, HttpServletResponse response,
InputStream is) throws IOException, S3Exception {
String method = request.getMethod();
String uri = request.getRequestURI();
if (!this.servicePath.isEmpty()) {
if (uri.length() > this.servicePath.length()) {
uri = uri.substring(this.servicePath.length());
}
}
logger.debug("request: {}", request);
String hostHeader = request.getHeader(HttpHeaders.HOST);
if (hostHeader != null && virtualHost.isPresent()) {
hostHeader = HostAndPort.fromString(hostHeader).getHostText();
String virtualHostSuffix = "." + virtualHost.get();
if (!hostHeader.equals(virtualHost.get())) {
if (hostHeader.endsWith(virtualHostSuffix)) {
String bucket = hostHeader.substring(0,
hostHeader.length() - virtualHostSuffix.length());
uri = "/" + bucket + uri;
} else {
String bucket = hostHeader.toLowerCase();
uri = "/" + bucket + uri;
}
}
}
boolean hasDateHeader = false;
boolean hasXAmzDateHeader = false;
for (String headerName : Collections.list(request.getHeaderNames())) {
for (String headerValue : Collections.list(request.getHeaders(
headerName))) {
logger.debug("header: {}: {}", headerName,
Strings.nullToEmpty(headerValue));
}
if (headerName.equalsIgnoreCase(HttpHeaders.DATE)) {
hasDateHeader = true;
} else if (headerName.equalsIgnoreCase("x-amz-date")) {
logger.debug("have the x-amz-date heaer {}", headerName);
// why x-amz-date name exist,but value is null?
if ("".equals(request.getHeader("x-amz-date")) ||
request.getHeader("x-amz-date") == null) {
logger.debug("have empty x-amz-date");
} else {
hasXAmzDateHeader = true;
}
}
}
boolean haveBothDateHeader = false;
if (hasDateHeader && hasXAmzDateHeader) {
haveBothDateHeader = true;
}
// when access information is not provided in request header,
// treat it as anonymous, return all public accessible information
if (!anonymousIdentity &&
(method.equals("GET") || method.equals("HEAD") ||
method.equals("POST")) &&
request.getHeader(HttpHeaders.AUTHORIZATION) == null &&
// v2 or /v4
request.getParameter("X-Amz-Algorithm") == null && // v4 query
request.getParameter("AWSAccessKeyId") == null && // v2 query
defaultBlobStore != null) {
doHandleAnonymous(request, response, is, uri, defaultBlobStore);
return;
}
// should according the AWSAccessKeyId= Signature or auth header nil
if (!anonymousIdentity && !hasDateHeader && !hasXAmzDateHeader &&
request.getParameter("X-Amz-Date") == null &&
request.getParameter("Expires") == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"AWS authentication requires a valid Date or" +
" x-amz-date header");
}
BlobStore blobStore;
String requestIdentity = null;
String headerAuthorization = request.getHeader(
HttpHeaders.AUTHORIZATION);
S3AuthorizationHeader authHeader = null;
boolean presignedUrl = false;
if (!anonymousIdentity) {
if (headerAuthorization == null) {
String algorithm = request.getParameter("X-Amz-Algorithm");
if (algorithm == null) { //v2 query
String identity = request.getParameter("AWSAccessKeyId");
String signature = request.getParameter("Signature");
if (identity == null || signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS " + identity + ":" + signature;
presignedUrl = true;
} else if (algorithm.equals("AWS4-HMAC-SHA256")) { //v4 query
String credential = request.getParameter(
"X-Amz-Credential");
String signedHeaders = request.getParameter(
"X-Amz-SignedHeaders");
String signature = request.getParameter(
"X-Amz-Signature");
if (credential == null || signedHeaders == null ||
signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS4-HMAC-SHA256" +
" Credential=" + credential +
", requestSignedHeaders=" + signedHeaders +
", Signature=" + signature;
presignedUrl = true;
}
}
try {
authHeader = new S3AuthorizationHeader(headerAuthorization);
//whether v2 or v4 (normal header and query)
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, iae);
}
requestIdentity = authHeader.identity;
}
long dateSkew = 0; //date for timeskew check
//v2 GET /s3proxy-1080747708/foo?AWSAccessKeyId=local-identity&Expires=
//1510322602&Signature=UTyfHY1b1Wgr5BFEn9dpPlWdtFE%3D)
//have no date
boolean haveDate = true;
AuthenticationType finalAuthType = null;
if (authHeader.authenticationType == AuthenticationType.AWS_V2 &&
(authenticationType == AuthenticationType.AWS_V2 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V2;
} else if (authHeader.authenticationType == AuthenticationType.AWS_V4 &&
(authenticationType == AuthenticationType.AWS_V4 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V4;
} else if (authenticationType != AuthenticationType.NONE) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
if (hasXAmzDateHeader) { //format diff between v2 and v4
if (finalAuthType == AuthenticationType.AWS_V2) {
dateSkew = request.getDateHeader("x-amz-date");
dateSkew /= 1000;
//case sensetive?
} else if (finalAuthType == AuthenticationType.AWS_V4) {
logger.debug("into process v4 {}",
request.getHeader("x-amz-date"));
dateSkew = parseIso8601(request.getHeader("x-amz-date"));
}
} else if (request.getParameter("X-Amz-Date") != null) { // v4 query
String dateString = request.getParameter("X-Amz-Date");
dateSkew = parseIso8601(dateString);
} else if (hasDateHeader) {
try {
dateSkew = request.getDateHeader(HttpHeaders.DATE);
logger.debug("dateheader {}", dateSkew);
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED, iae);
}
dateSkew /= 1000;
logger.debug("dateheader {}", dateSkew);
} else {
haveDate = false;
}
logger.debug("dateSkew {}", dateSkew);
if (haveDate) {
isTimeSkewed(dateSkew);
}
String[] path = uri.split("/", 3);
for (int i = 0; i < path.length; i++) {
path[i] = URLDecoder.decode(path[i], "UTF-8");
}
Map.Entry<String, BlobStore> provider =
blobStoreLocator.locateBlobStore(
requestIdentity, path.length > 1 ? path[1] : null,
path.length > 2 ? path[2] : null);
if (anonymousIdentity) {
blobStore = provider.getValue();
String contentSha256 = request.getHeader("x-amz-content-sha256");
if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(contentSha256)) {
is = new ChunkedInputStream(is);
}
} else if (requestIdentity == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
} else {
if (provider == null) {
throw new S3Exception(S3ErrorCode.INVALID_ACCESS_KEY_ID);
}
String credential = provider.getKey();
blobStore = provider.getValue();
String expiresString = request.getParameter("Expires");
if (expiresString != null) { // v2 query
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
}
String dateString = request.getParameter("X-Amz-Date");
//from para v4 query
expiresString = request.getParameter("X-Amz-Expires");
if (dateString != null && expiresString != null) { //v4 query
long date = parseIso8601(dateString);
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= date + expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"Request has expired");
}
}
// The aim ?
switch (authHeader.authenticationType) {
case AWS_V2:
switch (authenticationType) {
case AWS_V2:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case AWS_V4:
switch (authenticationType) {
case AWS_V4:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case NONE:
break;
default:
throw new IllegalArgumentException("Unhandled type: " +
authHeader.authenticationType);
}
String expectedSignature = null;
// When presigned url is generated, it doesn't consider service path
String uriForSigning = presignedUrl ? uri : this.servicePath + uri;
if (authHeader.hmacAlgorithm == null) { //v2
expectedSignature = AwsSignature.createAuthorizationSignature(
request, uriForSigning, credential, presignedUrl,
haveBothDateHeader);
} else {
String contentSha256 = request.getHeader(
"x-amz-content-sha256");
try {
byte[] payload;
if (request.getParameter("X-Amz-Algorithm") != null) {
payload = new byte[0];
} else if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(
contentSha256)) {
payload = new byte[0];
is = new ChunkedInputStream(is);
} else if ("UNSIGNED-PAYLOAD".equals(contentSha256)) {
payload = new byte[0];
} else {
// buffer the entire stream to calculate digest
// why input stream read contentlength of header?
payload = ByteStreams.toByteArray(ByteStreams.limit(
is, v4MaxNonChunkedRequestSize + 1));
if (payload.length == v4MaxNonChunkedRequestSize + 1) {
throw new S3Exception(
S3ErrorCode.MAX_MESSAGE_LENGTH_EXCEEDED);
}
// maybe we should check this when signing,
// a lot of dup code with aws sign code.
MessageDigest md = MessageDigest.getInstance(
authHeader.hashAlgorithm);
byte[] hash = md.digest(payload);
if (!contentSha256.equals(
BaseEncoding.base16().lowerCase()
.encode(hash))) {
throw new S3Exception(
S3ErrorCode
.X_AMZ_CONTENT_S_H_A_256_MISMATCH);
}
is = new ByteArrayInputStream(payload);
}
expectedSignature = AwsSignature
.createAuthorizationSignatureV4(// v4 sign
baseRequest, authHeader, payload, uriForSigning,
credential);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, e);
}
}
if (!expectedSignature.equals(authHeader.signature)) {
logger.debug("fail to validate signature");
throw new S3Exception(S3ErrorCode.SIGNATURE_DOES_NOT_MATCH);
}
}
for (String parameter : Collections.list(
request.getParameterNames())) {
if (UNSUPPORTED_PARAMETERS.contains(parameter)) {
logger.error("Unknown parameters {} with URI {}",
parameter, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// emit NotImplemented for unknown x-amz- headers
for (String headerName : Collections.list(request.getHeaderNames())) {
if (ignoreUnknownHeaders) {
continue;
}
if (!headerName.startsWith("x-amz-")) {
continue;
}
if (headerName.startsWith("x-amz-meta-")) {
continue;
}
if (headerName.equals("x-amz-storage-class") &&
request.getHeader(headerName).equals("STANDARD")) {
continue;
}
if (!SUPPORTED_X_AMZ_HEADERS.contains(headerName.toLowerCase())) {
logger.error("Unknown header {} with URI {}",
headerName, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// Validate container name
if (!uri.equals("/") && !isValidContainer(path[1])) {
if (method.equals("PUT") &&
(path.length <= 2 || path[2].isEmpty()) &&
!("".equals(request.getParameter("acl")))) {
throw new S3Exception(S3ErrorCode.INVALID_BUCKET_NAME);
} else {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
}
String uploadId = request.getParameter("uploadId");
switch (method) {
case "DELETE":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerDelete(response, blobStore, path[1]);
return;
} else if (uploadId != null) {
handleAbortMultipartUpload(response, blobStore, path[1],
path[2], uploadId);
return;
} else {
handleBlobRemove(response, blobStore, path[1], path[2]);
return;
}
case "GET":
if (uri.equals("/")) {
handleContainerList(response, blobStore);
return;
} else if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleGetContainerAcl(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("location"))) {
handleContainerLocation(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleListMultipartUploads(request, response, blobStore,
path[1]);
return;
}
handleBlobList(request, response, blobStore, path[1]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleGetBlobAcl(response, blobStore, path[1],
path[2]);
return;
} else if (uploadId != null) {
handleListParts(request, response, blobStore, path[1],
path[2], uploadId);
return;
}
handleGetBlob(request, response, blobStore, path[1],
path[2]);
return;
}
case "HEAD":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerExists(blobStore, path[1]);
return;
} else {
handleBlobMetadata(request, response, blobStore, path[1],
path[2]);
return;
}
case "POST":
if ("".equals(request.getParameter("delete"))) {
handleMultiBlobRemove(response, is, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleInitiateMultipartUpload(request, response, blobStore,
path[1], path[2]);
return;
} else if (uploadId != null &&
request.getParameter("partNumber") == null) {
handleCompleteMultipartUpload(response, is, blobStore, path[1],
path[2], uploadId);
return;
}
break;
case "PUT":
if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleSetContainerAcl(request, response, is, blobStore,
path[1]);
return;
}
handleContainerCreate(request, response, is, blobStore,
path[1]);
return;
} else if (uploadId != null) {
if (request.getHeader("x-amz-copy-source") != null) {
handleCopyPart(request, response, blobStore, path[1],
path[2], uploadId);
} else {
handleUploadPart(request, response, is, blobStore, path[1],
path[2], uploadId);
}
return;
} else if (request.getHeader("x-amz-copy-source") != null) {
handleCopyBlob(request, response, is, blobStore, path[1],
path[2]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleSetBlobAcl(request, response, is, blobStore, path[1],
path[2]);
return;
}
handlePutBlob(request, response, is, blobStore, path[1],
path[2]);
return;
}
default:
break;
}
logger.error("Unknown method {} with URI {}",
method, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
#location 118
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public final void doHandle(HttpServletRequest baseRequest,
HttpServletRequest request, HttpServletResponse response,
InputStream is) throws IOException, S3Exception {
String method = request.getMethod();
String uri = request.getRequestURI();
if (!this.servicePath.isEmpty()) {
if (uri.length() > this.servicePath.length()) {
uri = uri.substring(this.servicePath.length());
}
}
logger.debug("request: {}", request);
String hostHeader = request.getHeader(HttpHeaders.HOST);
if (hostHeader != null && virtualHost.isPresent()) {
hostHeader = HostAndPort.fromString(hostHeader).getHostText();
String virtualHostSuffix = "." + virtualHost.get();
if (!hostHeader.equals(virtualHost.get())) {
if (hostHeader.endsWith(virtualHostSuffix)) {
String bucket = hostHeader.substring(0,
hostHeader.length() - virtualHostSuffix.length());
uri = "/" + bucket + uri;
} else {
String bucket = hostHeader.toLowerCase();
uri = "/" + bucket + uri;
}
}
}
boolean hasDateHeader = false;
boolean hasXAmzDateHeader = false;
for (String headerName : Collections.list(request.getHeaderNames())) {
for (String headerValue : Collections.list(request.getHeaders(
headerName))) {
logger.debug("header: {}: {}", headerName,
Strings.nullToEmpty(headerValue));
}
if (headerName.equalsIgnoreCase(HttpHeaders.DATE)) {
hasDateHeader = true;
} else if (headerName.equalsIgnoreCase("x-amz-date")) {
logger.debug("have the x-amz-date heaer {}", headerName);
// why x-amz-date name exist,but value is null?
if ("".equals(request.getHeader("x-amz-date")) ||
request.getHeader("x-amz-date") == null) {
logger.debug("have empty x-amz-date");
} else {
hasXAmzDateHeader = true;
}
}
}
boolean haveBothDateHeader = false;
if (hasDateHeader && hasXAmzDateHeader) {
haveBothDateHeader = true;
}
// when access information is not provided in request header,
// treat it as anonymous, return all public accessible information
if (!anonymousIdentity &&
(method.equals("GET") || method.equals("HEAD") ||
method.equals("POST")) &&
request.getHeader(HttpHeaders.AUTHORIZATION) == null &&
// v2 or /v4
request.getParameter("X-Amz-Algorithm") == null && // v4 query
request.getParameter("AWSAccessKeyId") == null && // v2 query
defaultBlobStore != null) {
doHandleAnonymous(request, response, is, uri, defaultBlobStore);
return;
}
// should according the AWSAccessKeyId= Signature or auth header nil
if (!anonymousIdentity && !hasDateHeader && !hasXAmzDateHeader &&
request.getParameter("X-Amz-Date") == null &&
request.getParameter("Expires") == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"AWS authentication requires a valid Date or" +
" x-amz-date header");
}
BlobStore blobStore;
String requestIdentity = null;
String headerAuthorization = request.getHeader(
HttpHeaders.AUTHORIZATION);
S3AuthorizationHeader authHeader = null;
boolean presignedUrl = false;
if (!anonymousIdentity) {
if (headerAuthorization == null) {
String algorithm = request.getParameter("X-Amz-Algorithm");
if (algorithm == null) { //v2 query
String identity = request.getParameter("AWSAccessKeyId");
String signature = request.getParameter("Signature");
if (identity == null || signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS " + identity + ":" + signature;
presignedUrl = true;
} else if (algorithm.equals("AWS4-HMAC-SHA256")) { //v4 query
String credential = request.getParameter(
"X-Amz-Credential");
String signedHeaders = request.getParameter(
"X-Amz-SignedHeaders");
String signature = request.getParameter(
"X-Amz-Signature");
if (credential == null || signedHeaders == null ||
signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS4-HMAC-SHA256" +
" Credential=" + credential +
", requestSignedHeaders=" + signedHeaders +
", Signature=" + signature;
presignedUrl = true;
}
}
try {
authHeader = new S3AuthorizationHeader(headerAuthorization);
//whether v2 or v4 (normal header and query)
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, iae);
}
requestIdentity = authHeader.identity;
}
long dateSkew = 0; //date for timeskew check
//v2 GET /s3proxy-1080747708/foo?AWSAccessKeyId=local-identity&Expires=
//1510322602&Signature=UTyfHY1b1Wgr5BFEn9dpPlWdtFE%3D)
//have no date
if (!anonymousIdentity) {
boolean haveDate = true;
AuthenticationType finalAuthType = null;
if (authHeader.authenticationType == AuthenticationType.AWS_V2 &&
(authenticationType == AuthenticationType.AWS_V2 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V2;
} else if (
authHeader.authenticationType == AuthenticationType.AWS_V4 &&
(authenticationType == AuthenticationType.AWS_V4 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V4;
} else if (authenticationType != AuthenticationType.NONE) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
if (hasXAmzDateHeader) { //format diff between v2 and v4
if (finalAuthType == AuthenticationType.AWS_V2) {
dateSkew = request.getDateHeader("x-amz-date");
dateSkew /= 1000;
//case sensetive?
} else if (finalAuthType == AuthenticationType.AWS_V4) {
logger.debug("into process v4 {}",
request.getHeader("x-amz-date"));
dateSkew = parseIso8601(request.getHeader("x-amz-date"));
}
} else if (request.getParameter("X-Amz-Date") != null) { // v4 query
String dateString = request.getParameter("X-Amz-Date");
dateSkew = parseIso8601(dateString);
} else if (hasDateHeader) {
try {
dateSkew = request.getDateHeader(HttpHeaders.DATE);
logger.debug("dateheader {}", dateSkew);
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED, iae);
}
dateSkew /= 1000;
logger.debug("dateheader {}", dateSkew);
} else {
haveDate = false;
}
logger.debug("dateSkew {}", dateSkew);
if (haveDate) {
isTimeSkewed(dateSkew);
}
}
String[] path = uri.split("/", 3);
for (int i = 0; i < path.length; i++) {
path[i] = URLDecoder.decode(path[i], "UTF-8");
}
Map.Entry<String, BlobStore> provider =
blobStoreLocator.locateBlobStore(
requestIdentity, path.length > 1 ? path[1] : null,
path.length > 2 ? path[2] : null);
if (anonymousIdentity) {
blobStore = provider.getValue();
String contentSha256 = request.getHeader("x-amz-content-sha256");
if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(contentSha256)) {
is = new ChunkedInputStream(is);
}
} else if (requestIdentity == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
} else {
if (provider == null) {
throw new S3Exception(S3ErrorCode.INVALID_ACCESS_KEY_ID);
}
String credential = provider.getKey();
blobStore = provider.getValue();
String expiresString = request.getParameter("Expires");
if (expiresString != null) { // v2 query
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
}
String dateString = request.getParameter("X-Amz-Date");
//from para v4 query
expiresString = request.getParameter("X-Amz-Expires");
if (dateString != null && expiresString != null) { //v4 query
long date = parseIso8601(dateString);
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= date + expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"Request has expired");
}
}
// The aim ?
switch (authHeader.authenticationType) {
case AWS_V2:
switch (authenticationType) {
case AWS_V2:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case AWS_V4:
switch (authenticationType) {
case AWS_V4:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case NONE:
break;
default:
throw new IllegalArgumentException("Unhandled type: " +
authHeader.authenticationType);
}
String expectedSignature = null;
// When presigned url is generated, it doesn't consider service path
String uriForSigning = presignedUrl ? uri : this.servicePath + uri;
if (authHeader.hmacAlgorithm == null) { //v2
expectedSignature = AwsSignature.createAuthorizationSignature(
request, uriForSigning, credential, presignedUrl,
haveBothDateHeader);
} else {
String contentSha256 = request.getHeader(
"x-amz-content-sha256");
try {
byte[] payload;
if (request.getParameter("X-Amz-Algorithm") != null) {
payload = new byte[0];
} else if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(
contentSha256)) {
payload = new byte[0];
is = new ChunkedInputStream(is);
} else if ("UNSIGNED-PAYLOAD".equals(contentSha256)) {
payload = new byte[0];
} else {
// buffer the entire stream to calculate digest
// why input stream read contentlength of header?
payload = ByteStreams.toByteArray(ByteStreams.limit(
is, v4MaxNonChunkedRequestSize + 1));
if (payload.length == v4MaxNonChunkedRequestSize + 1) {
throw new S3Exception(
S3ErrorCode.MAX_MESSAGE_LENGTH_EXCEEDED);
}
// maybe we should check this when signing,
// a lot of dup code with aws sign code.
MessageDigest md = MessageDigest.getInstance(
authHeader.hashAlgorithm);
byte[] hash = md.digest(payload);
if (!contentSha256.equals(
BaseEncoding.base16().lowerCase()
.encode(hash))) {
throw new S3Exception(
S3ErrorCode
.X_AMZ_CONTENT_S_H_A_256_MISMATCH);
}
is = new ByteArrayInputStream(payload);
}
expectedSignature = AwsSignature
.createAuthorizationSignatureV4(// v4 sign
baseRequest, authHeader, payload, uriForSigning,
credential);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, e);
}
}
if (!expectedSignature.equals(authHeader.signature)) {
logger.debug("fail to validate signature");
throw new S3Exception(S3ErrorCode.SIGNATURE_DOES_NOT_MATCH);
}
}
for (String parameter : Collections.list(
request.getParameterNames())) {
if (UNSUPPORTED_PARAMETERS.contains(parameter)) {
logger.error("Unknown parameters {} with URI {}",
parameter, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// emit NotImplemented for unknown x-amz- headers
for (String headerName : Collections.list(request.getHeaderNames())) {
if (ignoreUnknownHeaders) {
continue;
}
if (!headerName.startsWith("x-amz-")) {
continue;
}
if (headerName.startsWith("x-amz-meta-")) {
continue;
}
if (headerName.equals("x-amz-storage-class") &&
request.getHeader(headerName).equals("STANDARD")) {
continue;
}
if (!SUPPORTED_X_AMZ_HEADERS.contains(headerName.toLowerCase())) {
logger.error("Unknown header {} with URI {}",
headerName, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// Validate container name
if (!uri.equals("/") && !isValidContainer(path[1])) {
if (method.equals("PUT") &&
(path.length <= 2 || path[2].isEmpty()) &&
!("".equals(request.getParameter("acl")))) {
throw new S3Exception(S3ErrorCode.INVALID_BUCKET_NAME);
} else {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
}
String uploadId = request.getParameter("uploadId");
switch (method) {
case "DELETE":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerDelete(response, blobStore, path[1]);
return;
} else if (uploadId != null) {
handleAbortMultipartUpload(response, blobStore, path[1],
path[2], uploadId);
return;
} else {
handleBlobRemove(response, blobStore, path[1], path[2]);
return;
}
case "GET":
if (uri.equals("/")) {
handleContainerList(response, blobStore);
return;
} else if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleGetContainerAcl(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("location"))) {
handleContainerLocation(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleListMultipartUploads(request, response, blobStore,
path[1]);
return;
}
handleBlobList(request, response, blobStore, path[1]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleGetBlobAcl(response, blobStore, path[1],
path[2]);
return;
} else if (uploadId != null) {
handleListParts(request, response, blobStore, path[1],
path[2], uploadId);
return;
}
handleGetBlob(request, response, blobStore, path[1],
path[2]);
return;
}
case "HEAD":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerExists(blobStore, path[1]);
return;
} else {
handleBlobMetadata(request, response, blobStore, path[1],
path[2]);
return;
}
case "POST":
if ("".equals(request.getParameter("delete"))) {
handleMultiBlobRemove(response, is, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleInitiateMultipartUpload(request, response, blobStore,
path[1], path[2]);
return;
} else if (uploadId != null &&
request.getParameter("partNumber") == null) {
handleCompleteMultipartUpload(response, is, blobStore, path[1],
path[2], uploadId);
return;
}
break;
case "PUT":
if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleSetContainerAcl(request, response, is, blobStore,
path[1]);
return;
}
handleContainerCreate(request, response, is, blobStore,
path[1]);
return;
} else if (uploadId != null) {
if (request.getHeader("x-amz-copy-source") != null) {
handleCopyPart(request, response, blobStore, path[1],
path[2], uploadId);
} else {
handleUploadPart(request, response, is, blobStore, path[1],
path[2], uploadId);
}
return;
} else if (request.getHeader("x-amz-copy-source") != null) {
handleCopyBlob(request, response, is, blobStore, path[1],
path[2]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleSetBlobAcl(request, response, is, blobStore, path[1],
path[2]);
return;
}
handlePutBlob(request, response, is, blobStore, path[1],
path[2]);
return;
}
default:
break;
}
logger.error("Unknown method {} with URI {}",
method, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
|
#vulnerable code
public final void doHandle(HttpServletRequest baseRequest,
HttpServletRequest request, HttpServletResponse response,
InputStream is) throws IOException, S3Exception {
String method = request.getMethod();
String uri = request.getRequestURI();
if (!this.servicePath.isEmpty()) {
if (uri.length() > this.servicePath.length()) {
uri = uri.substring(this.servicePath.length());
}
}
logger.debug("request: {}", request);
String hostHeader = request.getHeader(HttpHeaders.HOST);
if (hostHeader != null && virtualHost.isPresent()) {
hostHeader = HostAndPort.fromString(hostHeader).getHostText();
String virtualHostSuffix = "." + virtualHost.get();
if (!hostHeader.equals(virtualHost.get())) {
if (hostHeader.endsWith(virtualHostSuffix)) {
String bucket = hostHeader.substring(0,
hostHeader.length() - virtualHostSuffix.length());
uri = "/" + bucket + uri;
} else {
String bucket = hostHeader.toLowerCase();
uri = "/" + bucket + uri;
}
}
}
boolean hasDateHeader = false;
boolean hasXAmzDateHeader = false;
for (String headerName : Collections.list(request.getHeaderNames())) {
for (String headerValue : Collections.list(request.getHeaders(
headerName))) {
logger.debug("header: {}: {}", headerName,
Strings.nullToEmpty(headerValue));
}
if (headerName.equalsIgnoreCase(HttpHeaders.DATE)) {
hasDateHeader = true;
} else if (headerName.equalsIgnoreCase("x-amz-date")) {
logger.debug("have the x-amz-date heaer {}", headerName);
// why x-amz-date name exist,but value is null?
if ("".equals(request.getHeader("x-amz-date")) ||
request.getHeader("x-amz-date") == null) {
logger.debug("have empty x-amz-date");
} else {
hasXAmzDateHeader = true;
}
}
}
boolean haveBothDateHeader = false;
if (hasDateHeader && hasXAmzDateHeader) {
haveBothDateHeader = true;
}
// when access information is not provided in request header,
// treat it as anonymous, return all public accessible information
if (!anonymousIdentity &&
(method.equals("GET") || method.equals("HEAD") ||
method.equals("POST")) &&
request.getHeader(HttpHeaders.AUTHORIZATION) == null &&
// v2 or /v4
request.getParameter("X-Amz-Algorithm") == null && // v4 query
request.getParameter("AWSAccessKeyId") == null && // v2 query
defaultBlobStore != null) {
doHandleAnonymous(request, response, is, uri, defaultBlobStore);
return;
}
// should according the AWSAccessKeyId= Signature or auth header nil
if (!anonymousIdentity && !hasDateHeader && !hasXAmzDateHeader &&
request.getParameter("X-Amz-Date") == null &&
request.getParameter("Expires") == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"AWS authentication requires a valid Date or" +
" x-amz-date header");
}
BlobStore blobStore;
String requestIdentity = null;
String headerAuthorization = request.getHeader(
HttpHeaders.AUTHORIZATION);
S3AuthorizationHeader authHeader = null;
boolean presignedUrl = false;
if (!anonymousIdentity) {
if (headerAuthorization == null) {
String algorithm = request.getParameter("X-Amz-Algorithm");
if (algorithm == null) { //v2 query
String identity = request.getParameter("AWSAccessKeyId");
String signature = request.getParameter("Signature");
if (identity == null || signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS " + identity + ":" + signature;
presignedUrl = true;
} else if (algorithm.equals("AWS4-HMAC-SHA256")) { //v4 query
String credential = request.getParameter(
"X-Amz-Credential");
String signedHeaders = request.getParameter(
"X-Amz-SignedHeaders");
String signature = request.getParameter(
"X-Amz-Signature");
if (credential == null || signedHeaders == null ||
signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS4-HMAC-SHA256" +
" Credential=" + credential +
", requestSignedHeaders=" + signedHeaders +
", Signature=" + signature;
presignedUrl = true;
}
}
try {
authHeader = new S3AuthorizationHeader(headerAuthorization);
//whether v2 or v4 (normal header and query)
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, iae);
}
requestIdentity = authHeader.identity;
}
long dateSkew = 0; //date for timeskew check
//v2 GET /s3proxy-1080747708/foo?AWSAccessKeyId=local-identity&Expires=
//1510322602&Signature=UTyfHY1b1Wgr5BFEn9dpPlWdtFE%3D)
//have no date
boolean haveDate = true;
AuthenticationType finalAuthType = null;
if (authHeader.authenticationType == AuthenticationType.AWS_V2 &&
(authenticationType == AuthenticationType.AWS_V2 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V2;
} else if (authHeader.authenticationType == AuthenticationType.AWS_V4 &&
(authenticationType == AuthenticationType.AWS_V4 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V4;
} else if (authenticationType != AuthenticationType.NONE) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
if (hasXAmzDateHeader) { //format diff between v2 and v4
if (finalAuthType == AuthenticationType.AWS_V2) {
dateSkew = request.getDateHeader("x-amz-date");
dateSkew /= 1000;
//case sensetive?
} else if (finalAuthType == AuthenticationType.AWS_V4) {
logger.debug("into process v4 {}",
request.getHeader("x-amz-date"));
dateSkew = parseIso8601(request.getHeader("x-amz-date"));
}
} else if (request.getParameter("X-Amz-Date") != null) { // v4 query
String dateString = request.getParameter("X-Amz-Date");
dateSkew = parseIso8601(dateString);
} else if (hasDateHeader) {
try {
dateSkew = request.getDateHeader(HttpHeaders.DATE);
logger.debug("dateheader {}", dateSkew);
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED, iae);
}
dateSkew /= 1000;
logger.debug("dateheader {}", dateSkew);
} else {
haveDate = false;
}
logger.debug("dateSkew {}", dateSkew);
if (haveDate) {
isTimeSkewed(dateSkew);
}
String[] path = uri.split("/", 3);
for (int i = 0; i < path.length; i++) {
path[i] = URLDecoder.decode(path[i], "UTF-8");
}
Map.Entry<String, BlobStore> provider =
blobStoreLocator.locateBlobStore(
requestIdentity, path.length > 1 ? path[1] : null,
path.length > 2 ? path[2] : null);
if (anonymousIdentity) {
blobStore = provider.getValue();
String contentSha256 = request.getHeader("x-amz-content-sha256");
if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(contentSha256)) {
is = new ChunkedInputStream(is);
}
} else if (requestIdentity == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
} else {
if (provider == null) {
throw new S3Exception(S3ErrorCode.INVALID_ACCESS_KEY_ID);
}
String credential = provider.getKey();
blobStore = provider.getValue();
String expiresString = request.getParameter("Expires");
if (expiresString != null) { // v2 query
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
}
String dateString = request.getParameter("X-Amz-Date");
//from para v4 query
expiresString = request.getParameter("X-Amz-Expires");
if (dateString != null && expiresString != null) { //v4 query
long date = parseIso8601(dateString);
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= date + expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"Request has expired");
}
}
// The aim ?
switch (authHeader.authenticationType) {
case AWS_V2:
switch (authenticationType) {
case AWS_V2:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case AWS_V4:
switch (authenticationType) {
case AWS_V4:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case NONE:
break;
default:
throw new IllegalArgumentException("Unhandled type: " +
authHeader.authenticationType);
}
String expectedSignature = null;
// When presigned url is generated, it doesn't consider service path
String uriForSigning = presignedUrl ? uri : this.servicePath + uri;
if (authHeader.hmacAlgorithm == null) { //v2
expectedSignature = AwsSignature.createAuthorizationSignature(
request, uriForSigning, credential, presignedUrl,
haveBothDateHeader);
} else {
String contentSha256 = request.getHeader(
"x-amz-content-sha256");
try {
byte[] payload;
if (request.getParameter("X-Amz-Algorithm") != null) {
payload = new byte[0];
} else if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(
contentSha256)) {
payload = new byte[0];
is = new ChunkedInputStream(is);
} else if ("UNSIGNED-PAYLOAD".equals(contentSha256)) {
payload = new byte[0];
} else {
// buffer the entire stream to calculate digest
// why input stream read contentlength of header?
payload = ByteStreams.toByteArray(ByteStreams.limit(
is, v4MaxNonChunkedRequestSize + 1));
if (payload.length == v4MaxNonChunkedRequestSize + 1) {
throw new S3Exception(
S3ErrorCode.MAX_MESSAGE_LENGTH_EXCEEDED);
}
// maybe we should check this when signing,
// a lot of dup code with aws sign code.
MessageDigest md = MessageDigest.getInstance(
authHeader.hashAlgorithm);
byte[] hash = md.digest(payload);
if (!contentSha256.equals(
BaseEncoding.base16().lowerCase()
.encode(hash))) {
throw new S3Exception(
S3ErrorCode
.X_AMZ_CONTENT_S_H_A_256_MISMATCH);
}
is = new ByteArrayInputStream(payload);
}
expectedSignature = AwsSignature
.createAuthorizationSignatureV4(// v4 sign
baseRequest, authHeader, payload, uriForSigning,
credential);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, e);
}
}
if (!expectedSignature.equals(authHeader.signature)) {
logger.debug("fail to validate signature");
throw new S3Exception(S3ErrorCode.SIGNATURE_DOES_NOT_MATCH);
}
}
for (String parameter : Collections.list(
request.getParameterNames())) {
if (UNSUPPORTED_PARAMETERS.contains(parameter)) {
logger.error("Unknown parameters {} with URI {}",
parameter, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// emit NotImplemented for unknown x-amz- headers
for (String headerName : Collections.list(request.getHeaderNames())) {
if (ignoreUnknownHeaders) {
continue;
}
if (!headerName.startsWith("x-amz-")) {
continue;
}
if (headerName.startsWith("x-amz-meta-")) {
continue;
}
if (headerName.equals("x-amz-storage-class") &&
request.getHeader(headerName).equals("STANDARD")) {
continue;
}
if (!SUPPORTED_X_AMZ_HEADERS.contains(headerName.toLowerCase())) {
logger.error("Unknown header {} with URI {}",
headerName, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// Validate container name
if (!uri.equals("/") && !isValidContainer(path[1])) {
if (method.equals("PUT") &&
(path.length <= 2 || path[2].isEmpty()) &&
!("".equals(request.getParameter("acl")))) {
throw new S3Exception(S3ErrorCode.INVALID_BUCKET_NAME);
} else {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
}
String uploadId = request.getParameter("uploadId");
switch (method) {
case "DELETE":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerDelete(response, blobStore, path[1]);
return;
} else if (uploadId != null) {
handleAbortMultipartUpload(response, blobStore, path[1],
path[2], uploadId);
return;
} else {
handleBlobRemove(response, blobStore, path[1], path[2]);
return;
}
case "GET":
if (uri.equals("/")) {
handleContainerList(response, blobStore);
return;
} else if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleGetContainerAcl(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("location"))) {
handleContainerLocation(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleListMultipartUploads(request, response, blobStore,
path[1]);
return;
}
handleBlobList(request, response, blobStore, path[1]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleGetBlobAcl(response, blobStore, path[1],
path[2]);
return;
} else if (uploadId != null) {
handleListParts(request, response, blobStore, path[1],
path[2], uploadId);
return;
}
handleGetBlob(request, response, blobStore, path[1],
path[2]);
return;
}
case "HEAD":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerExists(blobStore, path[1]);
return;
} else {
handleBlobMetadata(request, response, blobStore, path[1],
path[2]);
return;
}
case "POST":
if ("".equals(request.getParameter("delete"))) {
handleMultiBlobRemove(response, is, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleInitiateMultipartUpload(request, response, blobStore,
path[1], path[2]);
return;
} else if (uploadId != null &&
request.getParameter("partNumber") == null) {
handleCompleteMultipartUpload(response, is, blobStore, path[1],
path[2], uploadId);
return;
}
break;
case "PUT":
if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleSetContainerAcl(request, response, is, blobStore,
path[1]);
return;
}
handleContainerCreate(request, response, is, blobStore,
path[1]);
return;
} else if (uploadId != null) {
if (request.getHeader("x-amz-copy-source") != null) {
handleCopyPart(request, response, blobStore, path[1],
path[2], uploadId);
} else {
handleUploadPart(request, response, is, blobStore, path[1],
path[2], uploadId);
}
return;
} else if (request.getHeader("x-amz-copy-source") != null) {
handleCopyBlob(request, response, is, blobStore, path[1],
path[2]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleSetBlobAcl(request, response, is, blobStore, path[1],
path[2]);
return;
}
handlePutBlob(request, response, is, blobStore, path[1],
path[2]);
return;
}
default:
break;
}
logger.error("Unknown method {} with URI {}",
method, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
#location 135
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected Rectangle computeBoundingBox(Collection<? extends Shape> shapes, SpatialContext ctx) {
if (shapes.isEmpty())
return ctx.makeRectangle(Double.NaN, Double.NaN, Double.NaN, Double.NaN);
BBoxCalculator bboxCalc = new BBoxCalculator(ctx);
for (Shape geom : shapes) {
bboxCalc.expandRange(geom.getBoundingBox());
}
return bboxCalc.getBoundary();
}
|
#vulnerable code
protected Rectangle computeBoundingBox(Collection<? extends Shape> shapes, SpatialContext ctx) {
if (shapes.isEmpty())
return ctx.makeRectangle(Double.NaN, Double.NaN, Double.NaN, Double.NaN);
Range xRange = null;
double minY = Double.POSITIVE_INFINITY;
double maxY = Double.NEGATIVE_INFINITY;
for (Shape geom : shapes) {
Rectangle r = geom.getBoundingBox();
Range xRange2 = Range.xRange(r, ctx);
if (xRange == null) {
xRange = xRange2;
} else {
xRange = xRange.expandTo(xRange2);
}
minY = Math.min(minY, r.getMinY());
maxY = Math.max(maxY, r.getMaxY());
}
return ctx.makeRectangle(xRange.getMin(), xRange.getMax(), minY, maxY);
}
#location 19
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public M getCache(Object key) {
return Jboot.getCache().get(tableName(), key);
}
|
#vulnerable code
public M getCache(Object key) {
return Jboot.getJbootCache().get(tableName(), key);
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public List<M> getListCache(Object key, IDataLoader dataloader) {
return Jboot.getCache().get(tableName(), key, dataloader);
}
|
#vulnerable code
public List<M> getListCache(Object key, IDataLoader dataloader) {
return Jboot.getJbootCache().get(tableName(), key, dataloader);
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void copyConnStreamToResponse(HttpURLConnection conn, HttpServletResponse resp) throws IOException {
if (resp.isCommitted()) {
return;
}
InputStream inStream = null;
OutputStream outStream = null;
try {
inStream = getInputStream(conn);
outStream = resp.getOutputStream();
byte[] buffer = new byte[1024];
for (int len = -1; (len = inStream.read(buffer)) != -1; ) {
outStream.write(buffer, 0, len);
}
outStream.flush();
} finally {
quetlyClose(inStream, outStream);
}
}
|
#vulnerable code
private void copyConnStreamToResponse(HttpURLConnection conn, HttpServletResponse resp) throws IOException {
InputStream inStream = null;
InputStreamReader reader = null;
try {
if (!resp.isCommitted()) {
PrintWriter writer = resp.getWriter();
inStream = getInputStream(conn);
reader = new InputStreamReader(inStream);
int len;
char[] buffer = new char[1024];
while ((len = reader.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream, reader);
}
}
#location 16
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void handle(String target, HttpServletRequest request, HttpServletResponse response, boolean[] isHandled) {
if (target.indexOf('.') != -1 || JbootWebsocketManager.me().isWebsokcetEndPoint(target)) {
return;
}
/**
* 初始化 当前线程的 Hystrix
*/
HystrixRequestContext context = HystrixRequestContext.initializeContext();
/**
* 通过 JbootRequestContext 去保存 request,然后可以在当前线程的任何地方
* 通过 JbootRequestContext.getRequest() 去获取。
*/
JbootRequestContext.handle(request, response);
/**
* 初始化 异常记录器,用于记录异常信息,然后在页面输出
*/
JbootExceptionHolder.init();
try {
/**
* 执行请求逻辑
*/
doHandle(target, new JbootServletRequestWrapper(request), response, isHandled);
} finally {
context.shutdown();
JbootRequestContext.release();
JbootExceptionHolder.release();
}
}
|
#vulnerable code
@Override
public void handle(String target, HttpServletRequest request, HttpServletResponse response, boolean[] isHandled) {
if (target.indexOf('.') != -1 || JbootWebsocketManager.me().containsEndPoint(target)) {
return;
}
/**
* 初始化 当前线程的 Hystrix
*/
HystrixRequestContext context = HystrixRequestContext.initializeContext();
/**
* 通过 JbootRequestContext 去保存 request,然后可以在当前线程的任何地方
* 通过 JbootRequestContext.getRequest() 去获取。
*/
JbootRequestContext.handle(request, response);
/**
* 初始化 异常记录器,用于记录异常信息,然后在页面输出
*/
JbootExceptionHolder.init();
try {
/**
* 执行请求逻辑
*/
doHandle(target, new JbootServletRequestWrapper(request), response, isHandled);
} finally {
context.shutdown();
JbootRequestContext.release();
JbootExceptionHolder.release();
}
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static void putDataToCache(int liveSeconds, String cacheName, String cacheKey, Object data) {
liveSeconds = liveSeconds > 0
? liveSeconds
: CONFIG.getAopCacheLiveSeconds();
if (liveSeconds > 0) {
AopCache.put(cacheName, cacheKey, data, liveSeconds);
} else {
AopCache.put(cacheName, cacheKey, data);
}
}
|
#vulnerable code
static void putDataToCache(int liveSeconds, String cacheName, String cacheKey, Object data) {
liveSeconds = liveSeconds > 0
? liveSeconds
: CONFIG.getAopCacheLiveSeconds();
if (liveSeconds > 0) {
getAopCache().put(cacheName, cacheKey, data, liveSeconds);
} else {
getAopCache().put(cacheName, cacheKey, data);
}
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void run() {
if (redis == null) {
return;
}
Long result = null;
for (int i = 0; i < 5; i++) {
Long setTimeMillis = System.currentTimeMillis();
result = redis.setnx(key, setTimeMillis);
//error
if (result == null) {
quietSleep();
}
//setnx fail
else if (result == 0) {
Long saveTimeMillis = redis.get(key);
if (saveTimeMillis == null) {
reset();
}
long ttl = System.currentTimeMillis() - saveTimeMillis;
if (ttl > expire) {
//防止死锁
reset();
}
// 休息 2 秒钟,重新去抢,因为可能别的应用执行失败了
quietSleep();
}
//set success
else if (result == 1) {
break;
}
}
//抢了5次都抢不到,证明已经被别的应用抢走了
if (result == null || result == 0) {
return;
}
try {
if (runnable != null) {
runnable.run();
} else {
boolean runSuccess = execute();
//run()执行失败,让别的分布式应用APP去执行
//如果run()执行的时间很长(超过30秒),那么别的分布式应用可能也抢不到了,只能等待下次轮休
//作用:故障转移
if (!runSuccess) {
reset();
}
}
}
// 如果 run() 执行异常,让别的分布式应用APP去执行
// 作用:故障转移
catch (Throwable ex) {
LOG.error(ex.toString(), ex);
reset();
}
}
|
#vulnerable code
@Override
public void run() {
Long result = null;
for (int i = 0; i < 6; i++) {
result = redis.setnx(key, "locked");
//error
if (result == null) {
quietSleep();
}
//setnx fail
else if (result == 0) {
Long ttl = redis.ttl(key);
if (ttl == null || ttl <= 0 || ttl > expire) {
//防止死锁
reset();
} else {
// 休息 2 秒钟,重新去抢,因为可能别的设置好后,但是却执行失败了
quietSleep();
}
}
//set success
else if (result == 1) {
break;
}
}
//抢了5次都抢不到,证明已经被别的应用抢走了
if (result == null || result == 0) {
return;
}
//抢到了,但是设置超时时间设置失败,删除后,让分布式的其他app去抢
Long expireResult = redis.expire(key, 50);
if (expireResult == null && expireResult <= 0) {
reset();
return;
}
try {
boolean runSuccess = execute();
//run()执行失败,让别的分布式应用APP去执行
//如果run()执行的时间很长(超过30秒),那么别的分布式应用可能也抢不到了,只能等待下次轮休
//作用:故障转移
if (!runSuccess) {
reset();
}
}
// 如果 run() 执行异常,让别的分布式应用APP去执行
// 作用:故障转移
catch (Throwable ex) {
LOG.error(ex.toString(), ex);
reset();
}
}
#location 40
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
return doGet(targetClass);
}
|
#vulnerable code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
// Aop.get(obj.getClass()) 可以用 Aop.inject(obj),所以注掉下一行代码
// targetClass = (Class<T>)getUsefulClass(targetClass);
targetClass = (Class<T>) getMappingClass(targetClass);
Singleton si = targetClass.getAnnotation(Singleton.class);
boolean singleton = (si != null ? si.value() : this.singleton);
Object ret;
if (!singleton) {
ret = createObject(targetClass);
doInject(targetClass, ret, injectDepth);
return (T) ret;
}
ret = singletonCache.get(targetClass);
if (ret != null) {
return (T) ret;
}
//只有在循环依赖的时候,这个context才会有值
ret = context.get().get(targetClass);
if (ret != null) {
return (T) ret;
}
synchronized (this) {
ret = singletonCache.get(targetClass);
if (ret == null) {
// ret = createObject(targetClass);
// doInject(targetClass, ret, injectDepth);
// singletonCache.put(targetClass, ret);
ret = createObject(targetClass);
//保存到本次初始化的上下文
context.get().put(targetClass, ret);
//循环注入
doInject(targetClass, ret, injectDepth);
//保存到缓存、并清除上下文数据
singletonCache.put(targetClass, ret);
context.get().clear();
context.remove();
}
}
return (T) ret;
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
return doGet(targetClass);
}
|
#vulnerable code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
// Aop.get(obj.getClass()) 可以用 Aop.inject(obj),所以注掉下一行代码
// targetClass = (Class<T>)getUsefulClass(targetClass);
targetClass = (Class<T>) getMappingClass(targetClass);
Singleton si = targetClass.getAnnotation(Singleton.class);
boolean singleton = (si != null ? si.value() : this.singleton);
Object ret;
if (!singleton) {
ret = createObject(targetClass);
doInject(targetClass, ret, injectDepth);
return (T) ret;
}
ret = singletonCache.get(targetClass);
if (ret != null) {
return (T) ret;
}
//只有在循环依赖的时候,这个context才会有值
ret = context.get().get(targetClass);
if (ret != null) {
return (T) ret;
}
synchronized (this) {
ret = singletonCache.get(targetClass);
if (ret == null) {
// ret = createObject(targetClass);
// doInject(targetClass, ret, injectDepth);
// singletonCache.put(targetClass, ret);
ret = createObject(targetClass);
//保存到本次初始化的上下文
context.get().put(targetClass, ret);
//循环注入
doInject(targetClass, ret, injectDepth);
//保存到缓存、并清除上下文数据
singletonCache.put(targetClass, ret);
context.get().clear();
context.remove();
}
}
return (T) ret;
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void removeAttribute(String name) {
Jboot.getCache().remove(SESSION_CACHE_NAME, buildKey(name));
}
|
#vulnerable code
@Override
public void removeAttribute(String name) {
Jboot.getJbootCache().remove(SESSION_CACHE_NAME, buildKey(name));
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public byte[] serialize(Object obj) throws IOException {
return SerializerManager.me().getSerializer(config.getSerializer()).serialize(obj);
}
|
#vulnerable code
@Override
public byte[] serialize(Object obj) throws IOException {
return Jboot.me().getSerializer().serialize(obj);
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void remove(String key) {
Jboot.getCache().remove(cache_name, key);
}
|
#vulnerable code
@Override
public void remove(String key) {
Jboot.getJbootCache().remove(cache_name, key);
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public int getErrorCode() {
return errorCode;
}
|
#vulnerable code
public int getErrorCode() {
return (int) get("errorCode");
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private static void copyStreamToResponse(HttpURLConnection connection, HttpServletResponse response) throws IOException {
InputStream inStream = null;
InputStreamReader reader = null;
try {
if (!response.isCommitted()) {
PrintWriter writer = response.getWriter();
inStream = getInputStream(connection);
reader = new InputStreamReader(inStream);
int len;
char[] buffer = new char[1024];
while ((len = reader.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream, reader);
}
}
|
#vulnerable code
private static void copyStreamToResponse(HttpURLConnection connection, HttpServletResponse response) throws IOException {
InputStream inStream = null;
try {
if (!response.isCommitted()) {
PrintWriter writer = response.getWriter();
inStream = getInputStream(connection);
int len;
char[] buffer = new char[1024];
InputStreamReader r = new InputStreamReader(inStream);
while ((len = r.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream);
}
}
#location 10
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void unzip(String zipFilePath, String targetPath) throws IOException {
unzip(zipFilePath, targetPath, true);
}
|
#vulnerable code
public static void unzip(String zipFilePath, String targetPath) throws IOException {
ZipFile zipFile = new ZipFile(zipFilePath);
try{
Enumeration<?> entryEnum = zipFile.entries();
if (null != entryEnum) {
while (entryEnum.hasMoreElements()) {
OutputStream os = null;
InputStream is = null;
try {
ZipEntry zipEntry = (ZipEntry) entryEnum.nextElement();
if (!zipEntry.isDirectory()) {
File targetFile = new File(targetPath + File.separator + zipEntry.getName());
if (!targetFile.getParentFile().exists()) {
targetFile.getParentFile().mkdirs();
}
os = new BufferedOutputStream(new FileOutputStream(targetFile));
is = zipFile.getInputStream(zipEntry);
byte[] buffer = new byte[4096];
int readLen = 0;
while ((readLen = is.read(buffer, 0, 4096)) > 0) {
os.write(buffer, 0, readLen);
}
}
} finally {
if (is != null)
is.close();
if (os != null)
os.close();
}
}
}
}finally{
zipFile.close();
}
}
#location 33
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void put(String cacheName, Object key, Object value) {
try {
ehcache.put(cacheName, key, value);
redisCache.put(cacheName, key, value);
} finally {
publishMessage(JbootEhredisMessage.ACTION_PUT, cacheName, key);
}
}
|
#vulnerable code
@Override
public void put(String cacheName, Object key, Object value) {
try {
ehcache.put(cacheName, key, value);
redisCache.put(cacheName, key, value);
} finally {
Jboot.me().getMq().publish(new JbootEhredisMessage(clientId, JbootEhredisMessage.ACTION_PUT, cacheName, key), channel);
}
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public boolean isConfigOk() {
if (configOk != null) {
return configOk;
}
synchronized (this) {
if (configOk == null) {
configOk = uri != null && uri.length > 0;
if (configOk) {
ensureUriConfigCorrect();
}
}
}
return configOk;
}
|
#vulnerable code
public boolean isConfigOk() {
if (configOk != null) {
return configOk;
}
synchronized (this) {
if (configOk == null) {
configOk = StrUtil.isNotBlank(uri);
if (configOk) {
ensureUriConfigCorrect();
}
}
}
return configOk;
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void index() {
renderText("hello " + serviceTest.getName("aaa"));
}
|
#vulnerable code
public void index() {
List<Record> records = Db.find("select * from `user`");
System.out.println("index .... ");
LogKit.error("xxxxxxx");
Jboot.getCache().put("test","test","valueeeeeeeeee");
String value = Jboot.getCache().get("test","test");
System.out.println("value:"+value);
renderText("hello " + serviceTest.getName());
// render();
}
#location 11
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void remove(String cacheName, Object key) {
try {
ehcache.remove(cacheName, key);
redisCache.remove(cacheName, key);
} finally {
publishMessage(JbootEhredisMessage.ACTION_REMOVE, cacheName, key);
}
}
|
#vulnerable code
@Override
public void remove(String cacheName, Object key) {
try {
ehcache.remove(cacheName, key);
redisCache.remove(cacheName, key);
} finally {
Jboot.me().getMq().publish(new JbootEhredisMessage(clientId, JbootEhredisMessage.ACTION_REMOVE, cacheName, key), channel);
}
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
AopCache.removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
AopCache.remove(cacheName, cacheKey);
}
}
|
#vulnerable code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
getAopCache().removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
getAopCache().remove(cacheName, cacheKey);
}
}
#location 13
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
DataSource dataSource = CodeGenHelpler.getDatasource();
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
generate(tableMetaList);
System.out.println("generate finished !!!");
}
|
#vulnerable code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
DatasourceConfig datasourceConfig = JbootProperties.get("jboot.datasource", DatasourceConfig.class);
HikariConfig config = new HikariConfig();
config.setJdbcUrl(datasourceConfig.getUrl());
config.setUsername(datasourceConfig.getUser());
config.setPassword(datasourceConfig.getPassword());
config.addDataSourceProperty("cachePrepStmts", "true");
config.addDataSourceProperty("prepStmtCacheSize", "250");
config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
config.setDriverClassName("com.mysql.jdbc.Driver");
HikariDataSource dataSource = new HikariDataSource(config);
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
if (StringUtils.isNotBlank(excludeTables)) {
List<TableMeta> newTableMetaList = new ArrayList<>();
Set<String> excludeTableSet = StringUtils.splitToSet(excludeTables.toLowerCase(), ",");
for (TableMeta tableMeta : tableMetaList) {
if (excludeTableSet.contains(tableMeta.name.toLowerCase())) {
System.out.println("exclude table : " + tableMeta.name);
continue;
}
newTableMetaList.add(tableMeta);
}
tableMetaList = newTableMetaList;
}
generate(tableMetaList);
System.out.println("generate finished !!!");
}
#location 17
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void putValue(String name, Object value) {
Jboot.getCache().put(SESSION_CACHE_NAME, buildKey(name), value);
}
|
#vulnerable code
@Override
public void putValue(String name, Object value) {
Jboot.getJbootCache().put(SESSION_CACHE_NAME, buildKey(name), value);
}
#location 3
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void removeAll(String cacheName) {
try {
ehcache.removeAll(cacheName);
redisCache.removeAll(cacheName);
} finally {
publishMessage(JbootEhredisMessage.ACTION_REMOVE_ALL, cacheName, null);
}
}
|
#vulnerable code
@Override
public void removeAll(String cacheName) {
try {
ehcache.removeAll(cacheName);
redisCache.removeAll(cacheName);
} finally {
Jboot.me().getMq().publish(new JbootEhredisMessage(clientId, JbootEhredisMessage.ACTION_REMOVE_ALL, cacheName, null), channel);
}
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
AopCache.removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
AopCache.remove(cacheName, cacheKey);
}
}
|
#vulnerable code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
getAopCache().removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
getAopCache().remove(cacheName, cacheKey);
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public boolean acquire() {
long timeout = timeoutMsecs;
do {
long expires = System.currentTimeMillis() + expireMsecs + 1;
Long result = redis.setnx(lockName, expires);
if (result != null && result == 1) {
// lock acquired
locked = true;
return true;
}
Long currentValue = redis.get(lockName);
if (currentValue != null && currentValue < System.currentTimeMillis()) {
//判断是否为空,不为空的情况下,如果被其他线程设置了值,则第二个条件判断是过不去的
// lock is expired
Long oldValue = redis.getSet(lockName, expires);
//获取上一个锁到期时间,并设置现在的锁到期时间,
//只有一个线程才能获取上一个线上的设置时间,因为jedis.getSet是同步的
if (oldValue != null && oldValue.equals(currentValue)) {
//如果这个时候,多个线程恰好都到了这里
//只有一个线程的设置值和当前值相同,他才有权利获取锁
//lock acquired
locked = true;
return true;
}
}
if (timeout > 0) {
timeout -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (timeout > 0);
return false;
}
|
#vulnerable code
public boolean acquire() {
long timeout = timeoutMsecs;
do {
long expires = System.currentTimeMillis() + expireMsecs + 1;
Long result = Jboot.me().getRedis().setnx(lockName, expires);
if (result != null && result == 1) {
// lock acquired
locked = true;
return true;
}
Long currentValue = Jboot.me().getRedis().get(lockName);
if (currentValue != null && currentValue < System.currentTimeMillis()) {
//判断是否为空,不为空的情况下,如果被其他线程设置了值,则第二个条件判断是过不去的
// lock is expired
Long oldValue = Jboot.me().getRedis().getSet(lockName, expires);
//获取上一个锁到期时间,并设置现在的锁到期时间,
//只有一个线程才能获取上一个线上的设置时间,因为jedis.getSet是同步的
if (oldValue != null && oldValue.equals(currentValue)) {
//如果这个时候,多个线程恰好都到了这里
//只有一个线程的设置值和当前值相同,他才有权利获取锁
//lock acquired
locked = true;
return true;
}
}
if (timeout > 0) {
timeout -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (timeout > 0);
return false;
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void publishMessage(int action, String cacheName, Object key) {
JbootEhredisMessage message = new JbootEhredisMessage(clientId, action, cacheName, key);
redis.publish(serializer.serialize(channel), serializer.serialize(message));
}
|
#vulnerable code
private void publishMessage(int action, String cacheName, Object key) {
Jboot.me().getMq().publish(new JbootEhredisMessage(clientId, action, cacheName, key), channel);
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static <T> T service(Class<T> clazz, String group, String version) {
return jboot.getRpc().serviceObtain(clazz, group, version);
}
|
#vulnerable code
public static <T> T service(Class<T> clazz, String group, String version) {
return me().getRpc().serviceObtain(clazz, group, version);
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public <T> T get(Class<T> clazz, String prefix, String file) {
/**
* 开发模式下,热加载会导致由于Config是不同的 ClassLoader,
* 如果走缓存会Class转化异常
*/
if (isDevMode()) {
return createConfigObject(clazz, prefix, file);
}
Object configObject = configCache.get(clazz.getName() + prefix);
if (configObject == null) {
synchronized (clazz) {
if (configObject == null) {
configObject = createConfigObject(clazz, prefix, file);
configCache.put(clazz.getName() + prefix, configObject);
}
}
}
return (T) configObject;
}
|
#vulnerable code
public <T> T get(Class<T> clazz, String prefix, String file) {
/**
* 开发模式下,热加载会导致由于Config是不同的 ClassLoader,
* 如果走缓存会Class转化异常
*/
if (JbootApplication.isDevMode()) {
return createConfigObject(clazz, prefix, file);
}
Object configObject = configCache.get(clazz.getName() + prefix);
if (configObject == null) {
synchronized (clazz) {
if (configObject == null) {
configObject = createConfigObject(clazz, prefix, file);
configCache.put(clazz.getName() + prefix, configObject);
}
}
}
return (T) configObject;
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected void doInject(Class<?> targetClass, Object targetObject, int injectDepth) throws ReflectiveOperationException {
if ((injectDepth--) <= 0) {
return;
}
targetClass = getUsefulClass(targetClass);
Field[] fields = targetClass.getDeclaredFields();
if (fields.length == 0) {
return;
}
for (Field field : fields) {
// Inject inject = field.getAnnotation(Inject.class);
// if (inject == null) {
// continue ;
// }
//
// Class<?> fieldInjectedClass = inject.value();
// if (fieldInjectedClass == Void.class) {
// fieldInjectedClass = field.getType();
// }
//
// Object fieldInjectedObject = doGet(fieldInjectedClass, injectDepth);
// field.setAccessible(true);
// field.set(targetObject, fieldInjectedObject);
Inject inject = field.getAnnotation(Inject.class);
if (inject != null) {
doInjectJFinalOrginal(targetObject, field, inject, injectDepth);
continue;
}
ConfigValue configValue = field.getAnnotation(ConfigValue.class);
if (configValue != null) {
doInjectConfigValue(targetObject, field, configValue);
continue;
}
RPCInject rpcInject = field.getAnnotation(RPCInject.class);
if (rpcInject != null) {
doInjectRPC(targetObject, field, rpcInject);
continue;
}
}
}
|
#vulnerable code
@Override
protected void doInject(Class<?> targetClass, Object targetObject, int injectDepth) throws ReflectiveOperationException {
if ((injectDepth--) <= 0) {
return;
}
targetClass = getUsefulClass(targetClass);
Field[] fields = targetClass.getDeclaredFields();
if (fields.length == 0) {
return;
}
for (Field field : fields) {
// Inject inject = field.getAnnotation(Inject.class);
// if (inject == null) {
// continue ;
// }
//
// Class<?> fieldInjectedClass = inject.value();
// if (fieldInjectedClass == Void.class) {
// fieldInjectedClass = field.getType();
// }
//
// Object fieldInjectedObject = doGet(fieldInjectedClass, injectDepth);
// field.setAccessible(true);
// field.set(targetObject, fieldInjectedObject);
Inject inject = field.getAnnotation(Inject.class);
if (inject != null) {
injectByJFinalInject(targetObject, field, inject, injectDepth);
continue;
}
InjectConfigValue injectConfigValue = field.getAnnotation(InjectConfigValue.class);
if (injectConfigValue != null) {
doInjectConfigValue(targetObject, field, injectConfigValue);
continue;
}
InjectConfigModel injectConfigModel = field.getAnnotation(InjectConfigModel.class);
if (injectConfigModel != null) {
doInjectConfigModel(targetObject, field, injectConfigModel);
continue;
}
RPCInject rpcInject = field.getAnnotation(RPCInject.class);
if (rpcInject != null) {
doInjectRPC(targetObject, field, rpcInject);
continue;
}
}
}
#location 42
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
return doGet(targetClass);
}
|
#vulnerable code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
// Aop.get(obj.getClass()) 可以用 Aop.inject(obj),所以注掉下一行代码
// targetClass = (Class<T>)getUsefulClass(targetClass);
targetClass = (Class<T>) getMappingClass(targetClass);
Singleton si = targetClass.getAnnotation(Singleton.class);
boolean singleton = (si != null ? si.value() : this.singleton);
Object ret;
if (!singleton) {
ret = createObject(targetClass);
doInject(targetClass, ret, injectDepth);
return (T) ret;
}
ret = singletonCache.get(targetClass);
if (ret != null) {
return (T) ret;
}
//只有在循环依赖的时候,这个context才会有值
ret = context.get().get(targetClass);
if (ret != null) {
return (T) ret;
}
synchronized (this) {
ret = singletonCache.get(targetClass);
if (ret == null) {
// ret = createObject(targetClass);
// doInject(targetClass, ret, injectDepth);
// singletonCache.put(targetClass, ret);
ret = createObject(targetClass);
//保存到本次初始化的上下文
context.get().put(targetClass, ret);
//循环注入
doInject(targetClass, ret, injectDepth);
//保存到缓存、并清除上下文数据
singletonCache.put(targetClass, ret);
context.get().clear();
context.remove();
}
}
return (T) ret;
}
#location 13
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testRedis() {
// Jboot.setBootArg("jboot.redis.host", "127.0.0.1");
//// Jboot.setBootArg("jboot.redis.password", "123456");
//
// JbootRedis redis = Jboot.me().getRedis();
// redis.set("mykey", "myvalue");
//
// redis.lpush("list", 1,2,3,4,5);
//
// System.out.println(redis.get("mykey").toString());
// System.out.println(redis.lrange("list", 0, -1));
//
// System.out.println(redis.blpop(10000, "list"));
}
|
#vulnerable code
@Test
public void testRedis() {
Jboot.setBootArg("jboot.redis.host", "127.0.0.1");
// Jboot.setBootArg("jboot.redis.password", "123456");
JbootRedis redis = Jboot.me().getRedis();
redis.set("mykey", "myvalue");
redis.lpush("list", 1,2,3,4,5);
System.out.println(redis.get("mykey").toString());
System.out.println(redis.lrange("list", 0, -1));
System.out.println(redis.blpop(10000, "list"));
}
#location 8
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testHttpDownload() {
// String url = "http://www.xxx.com/abc.zip";
//
// File downloadToFile = new File("/xxx/abc.zip");
//
//
// JbootHttpRequest request = JbootHttpRequest.create(url, null, JbootHttpRequest.METHOD_GET);
// request.setDownloadFile(downloadToFile);
//
//
// JbootHttpResponse response = Jboot.me().getHttp().handle(request);
//
// if (response.isError()){
// downloadToFile.delete();
// }
//
// System.out.println(downloadToFile.length());
}
|
#vulnerable code
@Test
public void testHttpDownload() {
String url = "http://www.xxx.com/abc.zip";
File downloadToFile = new File("/xxx/abc.zip");
JbootHttpRequest request = JbootHttpRequest.create(url, null, JbootHttpRequest.METHOD_GET);
request.setDownloadFile(downloadToFile);
JbootHttpResponse response = Jboot.me().getHttp().handle(request);
if (response.isError()){
downloadToFile.delete();
}
System.out.println(downloadToFile.length());
}
#location 13
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public BaseVO refreshForTemplate(HttpServletRequest request){
BaseVO vo = new BaseVO();
Site site = Func.getCurrentSite();
if(site == null){
vo.setBaseVO(BaseVO.FAILURE, "尚未登陆");
return vo;
}
TemplateCMS template = new TemplateCMS(site);
//取得当前网站所有模版页面
// TemplatePageListVO templatePageListVO = templateService.getTemplatePageListByCache(request);
//取得当前网站首页模版页面
TemplatePageVO templatePageIndexVO = templateService.getTemplatePageIndexByCache(request);
//取得网站所有栏目信息
List<SiteColumn> siteColumnList = sqlDAO.findBySqlQuery("SELECT * FROM site_column WHERE siteid = "+site.getId()+" ORDER BY rank ASC", SiteColumn.class);
//取得网站所有文章News信息
List<News> newsList = sqlDAO.findBySqlQuery("SELECT * FROM news WHERE siteid = "+site.getId() + " AND status = "+News.STATUS_NORMAL+" ORDER BY addtime DESC", News.class);
List<NewsData> newsDataList = sqlDAO.findBySqlQuery("SELECT news_data.* FROM news,news_data WHERE news.siteid = "+site.getId() + " AND news.status = "+News.STATUS_NORMAL+" AND news.id = news_data.id ORDER BY news.id DESC", NewsData.class);
//对栏目进行重新调整,以栏目codeName为key,将栏目加入进Map中。用codeName来取栏目
Map<String, SiteColumn> columnMap = new HashMap<String, SiteColumn>();
//对文章-栏目进行分类,以栏目codeName为key,将文章List加入进自己对应的栏目。同时,若传入父栏目代码,其栏目下有多个新闻子栏目,会调出所有子栏目的内容(20条以内)
Map<String, List<News>> columnNewsMap = new HashMap<String, List<News>>();
for (int i = 0; i < siteColumnList.size(); i++) { //遍历栏目,将对应的文章加入其所属栏目
SiteColumn siteColumn = siteColumnList.get(i);
List<News> nList = new ArrayList<News>();
for (int j = 0; j < newsList.size(); j++) {
News news = newsList.get(j);
if(news.getCid() - siteColumn.getId() == 0){
nList.add(news);
newsList.remove(j); //将已经加入Map的文章从newsList中移除,提高效率。同时j--。
j--;
continue;
}
}
//默认是按照时间倒序,但是v4.4以后,用户可以自定义,可以根据时间正序排序,如果不是默认的倒序的话,就需要重新排序
//这里是某个具体子栏目的排序,父栏目排序调整的在下面
if(siteColumn.getListRank() != null && siteColumn.getListRank() - SiteColumn.LIST_RANK_ADDTIME_ASC == 0 ){
Collections.sort(nList, new Comparator<News>() {
public int compare(News n1, News n2) {
//按照发布时间正序排序,发布时间越早,排列越靠前
return n1.getAddtime() - n2.getAddtime();
}
});
}
columnMap.put(siteColumn.getCodeName(), siteColumn);
columnNewsMap.put(siteColumn.getCodeName(), nList);
}
//对 newsDataList 网站文章的内容进行调整,调整为map key:newsData.id value:newsData.text
Map<Integer, NewsDataBean> newsDataMap = new HashMap<Integer, NewsDataBean>();
for (int i = 0; i < newsDataList.size(); i++) {
NewsData nd = newsDataList.get(i);
newsDataMap.put(nd.getId(), new NewsDataBean(nd));
}
/*
* 对栏目进行上下级初始化,找到哪个是父级栏目,哪些是子栏目。并可以根据栏目代码来获取父栏目下的自栏目。 获取栏目树
*/
Map<String, SiteColumnTreeVO> columnTreeMap = new HashMap<String, SiteColumnTreeVO>(); //栏目树,根据栏目id获取当前栏目,以及下级栏目
//首先,遍历父栏目,将最顶级栏目(一级栏目)拿出来
for (int i = 0; i < siteColumnList.size(); i++) {
SiteColumn siteColumn = siteColumnList.get(i);
//根据父栏目代码,判断是否有上级栏目,若没有的话,那就是顶级栏目了,将其加入栏目树
if(siteColumn.getParentCodeName() == null || siteColumn.getParentCodeName().length() == 0){
SiteColumnTreeVO scTree = new SiteColumnTreeVO();
scTree.setSiteColumn(siteColumn);
scTree.setList(new ArrayList<SiteColumnTreeVO>());
scTree.setLevel(1); //顶级栏目,1级栏目
columnTreeMap.put(siteColumn.getCodeName(), scTree);
}
}
//然后,再遍历父栏目,将二级栏目拿出来
for (int i = 0; i < siteColumnList.size(); i++) {
SiteColumn siteColumn = siteColumnList.get(i);
//判断是否有上级栏目,根据父栏目代码,如果有的话,那就是子栏目了,符合
if(siteColumn.getParentCodeName() != null && siteColumn.getParentCodeName().length() > 0){
SiteColumnTreeVO scTree = new SiteColumnTreeVO();
scTree.setSiteColumn(siteColumn);
scTree.setList(new ArrayList<SiteColumnTreeVO>());
scTree.setLevel(2); //子栏目,二级栏目
if(columnTreeMap.get(siteColumn.getParentCodeName()) != null){
columnTreeMap.get(siteColumn.getParentCodeName()).getList().add(scTree);
}else{
//没有找到该子栏目的父栏目
}
}
}
/*
* 栏目树取完了,接着进行对栏目树内,有子栏目的父栏目,进行信息汇总,将子栏目的信息列表,都合并起来,汇总成一个父栏目的
*/
//对文章-父栏目进行分类,以栏目codeName为key,将每个子栏目的文章加入进总的所属的父栏目的List中,然后进行排序
Map<String, List<com.xnx3.wangmarket.admin.bean.News>> columnTreeNewsMap = new HashMap<String, List<com.xnx3.wangmarket.admin.bean.News>>();
for (Map.Entry<String, SiteColumnTreeVO> entry : columnTreeMap.entrySet()) {
SiteColumnTreeVO sct = entry.getValue();
if(sct.getList().size() > 0){
//有子栏目,才会对其进行数据汇总
columnTreeNewsMap.put(sct.getSiteColumn().getCodeName(), new ArrayList<com.xnx3.wangmarket.admin.bean.News>());
//遍历其子栏目,将每个子栏目的News信息合并在一块,供父栏目直接调用
for (int i = 0; i < sct.getList().size(); i++) {
SiteColumnTreeVO subSct = sct.getList().get(i); //子栏目的栏目信息
//v4.7版本更新,增加判断,只有栏目类型是列表页面的,才会将子栏目的信息合并入父栏目。
if(subSct.getSiteColumn().getType() - SiteColumn.TYPE_LIST == 0){
//将该栏目的News文章,创建一个新的List
List<com.xnx3.wangmarket.admin.bean.News> nList = new ArrayList<com.xnx3.wangmarket.admin.bean.News>();
List<News> oList = columnNewsMap.get(subSct.getSiteColumn().getCodeName());
for (int j = 0; j < oList.size(); j++) {
com.xnx3.wangmarket.admin.bean.News n = new com.xnx3.wangmarket.admin.bean.News();
News news = oList.get(j);
n.setNews(news);
n.setRank(news.getId());
nList.add(n);
}
//将新的List,合并入父栏目CodeName的List
columnTreeNewsMap.get(sct.getSiteColumn().getCodeName()).addAll(nList);
}
}
}
}
//合并完后,对每个父栏目的List进行先后顺序排序
for (Map.Entry<String, SiteColumnTreeVO> entry : columnTreeMap.entrySet()) {
SiteColumnTreeVO sct = entry.getValue();
if(sct.getList().size() > 0){
// Collections.sort(columnTreeNewsMap.get(sct.getSiteColumn().getCodeName()));
Collections.sort(columnTreeNewsMap.get(sct.getSiteColumn().getCodeName()), new Comparator<com.xnx3.wangmarket.admin.bean.News>() {
public int compare(com.xnx3.wangmarket.admin.bean.News n1, com.xnx3.wangmarket.admin.bean.News n2) {
if(sct.getSiteColumn().getListRank() != null && sct.getSiteColumn().getListRank() - SiteColumn.LIST_RANK_ADDTIME_ASC == 0){
//按照发布时间正序排序,发布时间越早,排列越靠前
return n2.getNews().getAddtime() - n1.getNews().getAddtime();
}else{
//按照发布时间倒序排序,发布时间越晚,排列越靠前
return n1.getNews().getAddtime() - n2.getNews().getAddtime();
}
}
});
}
}
//排序完后,将其取出,加入columnNewsMap中,供模版中动态调用父栏目代码,就能直接拿到其的所有子栏目信息数据
for (Map.Entry<String, SiteColumnTreeVO> entry : columnTreeMap.entrySet()) {
SiteColumnTreeVO sct = entry.getValue();
if(sct.getList().size() > 0){
List<com.xnx3.wangmarket.admin.bean.News> nList = columnTreeNewsMap.get(sct.getSiteColumn().getCodeName());
for (int i = nList.size()-1; i >= 0 ; i--) {
columnNewsMap.get(sct.getSiteColumn().getCodeName()).add(nList.get(i).getNews());
}
}
}
/*
* sitemap.xml
*/
String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+ "<urlset\n"
+ "\txmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\"\n"
+ "\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n"
+ "\txsi:schemaLocation=\"http://www.sitemaps.org/schemas/sitemap/0.9\n"
+ "\t\thttp://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd\">\n";
//加入首页
String indexUrl = "http://"+Func.getDomain(site);
xml = xml + getSitemapUrl(indexUrl, "1.00");
/*
* 模版替换生成页面的步骤
* 1.替换模版变量的标签
* 1.1 通用标签
* 1.2 动态栏目调用标签 (这里要将动态调用标签最先替换。不然动态调用标签非常可能会生成列表,会增加替换通用标签的数量,所以先替换通用标签后,在替换动态模版调用标签)
* 2.替换列表页模版、详情页模版的标签
* 2.1 通用标签
* 2.2 动态栏目调用标签
* 2.3 模版变量装载
*
*
* 分支--1->生成首页
* 分支--2->生成栏目列表页、详情页
* 分支--2->生成sitemap.xml
*/
//1.替换模版变量的标签,并在替换完毕后将其加入Session缓存
// Map<String, String> varMap = new HashMap<String, String>();
// Map<String, String> varMap = Func.getUserBeanForShiroSession().getTemplateVarDataMapForOriginal();
//v2.24,将模版变量的比对,改为模版页面
TemplatePageListVO tplVO = templateService.getTemplatePageListByCache(request);
if(tplVO == null || tplVO.getList().size() == 0){
vo.setBaseVO(BaseVO.FAILURE, "当前网站尚未选择/导入/增加模版,生成失败!网站有模版后才能根据模版生成整站!");
return vo;
}
//v4.7加入,避免没有模版变量时,生成整站报错
if(Func.getUserBeanForShiroSession().getTemplateVarMapForOriginal() == null){
Func.getUserBeanForShiroSession().setTemplateVarMapForOriginal(new HashMap<String, TemplateVarVO>());
}
for (Map.Entry<String, TemplateVarVO> entry : Func.getUserBeanForShiroSession().getTemplateVarMapForOriginal().entrySet()) {
//替换公共标签
String v = template.replacePublicTag(entry.getValue().getTemplateVarData().getText());
//替换栏目的动态调用标签
v = template.replaceSiteColumnBlock(v, columnNewsMap, columnMap, columnTreeMap, true, null, newsDataMap);
Func.getUserBeanForShiroSession().getTemplateVarCompileDataMap().put(entry.getKey(), v);
}
/*
* 进行第二步,对列表页模版、详情页模版进行通用标签等的替换,将其处理好。等生成的时候,直接取出来替换news、column即可
*/
TemplatePageListVO tpl = templateService.getTemplatePageListByCache(request); //取缓存中的模版页列表
Map<String, String> templateCacheMap = new HashMap<String, String>(); //替换好通用标签等的模版都缓存入此。Map<templatePage.name, templatePageData.text>
for (int i = 0; i < tpl.getList().size(); i++) {
TemplatePageVO tpVO = tpl.getList().get(i);
String text = null; //模版页内容
if(tpVO.getTemplatePageData() == null){
//若缓存中没有缓存上模版页详情,那么从数据库中挨个取出来(暂时先这么做,反正RDS剩余。后续将执行单挑SQL将所有页面一块拿出来再分配)(并且取出来后,要加入缓存,之后在点击生成整站,就不用再去数据库取了)
TemplatePageData tpd = sqlDAO.findById(TemplatePageData.class, tpVO.getTemplatePage().getId());
if(tpd != null){
text = tpd.getText();
}
}else{
text = tpVO.getTemplatePageData().getText();
}
if(text == null){
vo.setBaseVO(BaseVO.FAILURE, "模版页"+tpVO.getTemplatePage().getName()+"的内容不存在!请先检查此模版页");
return vo;
}
//进行2.1、2.2、2.3预操作
//替换公共标签
text = template.replacePublicTag(text);
//替换栏目的动态调用标签
text = template.replaceSiteColumnBlock(text, columnNewsMap, columnMap, columnTreeMap, true, null, newsDataMap);
//装载模版变量
text = template.assemblyTemplateVar(text);
//预处理后,将其缓存入Map,等待生成页面时直接获取
templateCacheMap.put(tpVO.getTemplatePage().getName(), text);
}
//最后还要将获得到的内容缓存入Session,下次就不用去数据库取了
//生成首页
String indexHtml = templateCacheMap.get(templatePageIndexVO.getTemplatePage().getName());
//替换首页中存在的栏目的动态调用标签
indexHtml = template.replaceSiteColumnBlock(indexHtml, columnNewsMap, columnMap, columnTreeMap, true, null, newsDataMap);
indexHtml = template.replacePublicTag(indexHtml); //替换公共标签
//生成首页保存到OSS或本地盘
AttachmentFile.putStringFile("site/"+site.getId()+"/index.html", indexHtml);
/*
* 生成栏目、内容页面
*/
//遍历出所有列表栏目
for (SiteColumn siteColumn : columnMap.values()) {
if(siteColumn.getCodeName() == null || siteColumn.getCodeName().length() == 0){
vo.setBaseVO(BaseVO.FAILURE, "栏目["+siteColumn.getName()+"]的“栏目代码”不存在,请先为其设置栏目代码");
return vo;
}
//取得当前栏目下的News列表
List<News> columnNewsList = columnNewsMap.get(siteColumn.getCodeName());
//获取当前栏目的内容页模版
String viewTemplateHtml = templateCacheMap.get(siteColumn.getTemplatePageViewName());
if(viewTemplateHtml == null){
vo.setBaseVO(BaseVO.FAILURE, "栏目["+siteColumn.getName()+"]未绑定页面内容模版,请去绑定");
return vo;
}
//替换内容模版中的动态栏目调用(动态标签引用)
viewTemplateHtml = template.replaceSiteColumnBlock(viewTemplateHtml, columnNewsMap, columnMap, columnTreeMap, false, siteColumn, newsDataMap);
//如果是新闻或者图文列表,那么才会生成栏目列表页面
if(siteColumn.getType() - SiteColumn.TYPE_LIST == 0 || siteColumn.getType() - SiteColumn.TYPE_NEWS == 0 || siteColumn.getType() - SiteColumn.TYPE_IMAGENEWS == 0){
//当前栏目的列表模版
String listTemplateHtml = templateCacheMap.get(siteColumn.getTemplatePageListName());
if(listTemplateHtml == null){
vo.setBaseVO(BaseVO.FAILURE, "栏目["+siteColumn.getName()+"]未绑定模版列表页面,请去绑定");
return vo;
}
//替换列表模版中的动态栏目调用(动态标签引用)
listTemplateHtml = template.replaceSiteColumnBlock(listTemplateHtml, columnNewsMap, columnMap, columnTreeMap, false, siteColumn, newsDataMap);
//生成其列表页面
template.generateListHtmlForWholeSite(listTemplateHtml, siteColumn, columnNewsList, newsDataMap);
//XML加入栏目页面
xml = xml + getSitemapUrl(indexUrl+"/"+template.generateSiteColumnListPageHtmlName(siteColumn, 1)+".html", "0.4");
/*
* 生成当前栏目的内容页面
*/
//判断栏目属性中,是否设置了生成内容详情页面, v4.7增加
if(siteColumn.getUseGenerateView() == null || siteColumn.getUseGenerateView() - SiteColumn.USED_ENABLE == 0){
for (int i = 0; i < columnNewsList.size(); i++) {
News news = columnNewsList.get(i);
if(siteColumn.getId() - news.getCid() == 0){
//当前文章是此栏目的,那么生成文章详情。不然是不生成的,免得在父栏目中生成子栏目的页面,导致siteColumn调用出现错误
//列表页的内容详情页面,还会有上一篇、下一篇的功能
News upNews = null;
News nextNews = null;
if(i > 0){
upNews = columnNewsList.get(i-1);
}
if((i+1) < columnNewsList.size()){
nextNews = columnNewsList.get(i+1);
}
//生成内容页面
template.generateViewHtmlForTemplateForWholeSite(news, siteColumn, newsDataMap.get(news.getId()), viewTemplateHtml, upNews, nextNews);
//XML加入内容页面
xml = xml + getSitemapUrl(indexUrl+"/"+template.generateNewsPageHtmlName(siteColumn, news)+".html", "0.5");
}
}
}
}else if(siteColumn.getType() - SiteColumn.TYPE_ALONEPAGE == 0 || siteColumn.getType() - SiteColumn.TYPE_PAGE == 0){
//独立页面,只生成内容模版
if(siteColumn.getEditMode() - SiteColumn.EDIT_MODE_TEMPLATE == 0){
//模版式编辑,无 news , 则直接生成
template.generateViewHtmlForTemplateForWholeSite(null, siteColumn, new NewsDataBean(null), viewTemplateHtml, null, null);
//独立页面享有更大的权重,赋予其 0.8
xml = xml + getSitemapUrl(indexUrl+"/"+template.generateNewsPageHtmlName(siteColumn, null)+".html", "0.8");
}else{
//UEditor、输入模型编辑方式
for (int i = 0; i < columnNewsList.size(); i++) {
News news = columnNewsList.get(i);
template.generateViewHtmlForTemplateForWholeSite(news, siteColumn, newsDataMap.get(news.getId()), viewTemplateHtml, null, null);
//独立页面享有更大的权重,赋予其 0.8
xml = xml + getSitemapUrl(indexUrl+"/"+template.generateNewsPageHtmlName(siteColumn, news)+".html", "0.8");
}
}
}else{
//其他栏目不管,当然,也没有其他类型栏目了,v4.6版本更新后,CMS模式一共就这两种类型的
}
}
//生成 sitemap.xml
xml = xml + "</urlset>";
AttachmentFile.putStringFile("site/"+site.getId()+"/sitemap.xml", xml);
return new BaseVO();
}
|
#vulnerable code
public BaseVO refreshForTemplate(HttpServletRequest request){
BaseVO vo = new BaseVO();
Site site = Func.getCurrentSite();
TemplateCMS template = new TemplateCMS(site);
//取得当前网站所有模版页面
// TemplatePageListVO templatePageListVO = templateService.getTemplatePageListByCache(request);
//取得当前网站首页模版页面
TemplatePageVO templatePageIndexVO = templateService.getTemplatePageIndexByCache(request);
//取得网站所有栏目信息
List<SiteColumn> siteColumnList = sqlDAO.findBySqlQuery("SELECT * FROM site_column WHERE siteid = "+site.getId()+" ORDER BY rank ASC", SiteColumn.class);
//取得网站所有文章News信息
List<News> newsList = sqlDAO.findBySqlQuery("SELECT * FROM news WHERE siteid = "+site.getId() + " AND status = "+News.STATUS_NORMAL+" ORDER BY addtime DESC", News.class);
List<NewsData> newsDataList = sqlDAO.findBySqlQuery("SELECT news_data.* FROM news,news_data WHERE news.siteid = "+site.getId() + " AND news.status = "+News.STATUS_NORMAL+" AND news.id = news_data.id ORDER BY news.id DESC", NewsData.class);
//对栏目进行重新调整,以栏目codeName为key,将栏目加入进Map中。用codeName来取栏目
Map<String, SiteColumn> columnMap = new HashMap<String, SiteColumn>();
//对文章-栏目进行分类,以栏目codeName为key,将文章List加入进自己对应的栏目。同时,若传入父栏目代码,其栏目下有多个新闻子栏目,会调出所有子栏目的内容(20条以内)
Map<String, List<News>> columnNewsMap = new HashMap<String, List<News>>();
for (int i = 0; i < siteColumnList.size(); i++) { //遍历栏目,将对应的文章加入其所属栏目
SiteColumn siteColumn = siteColumnList.get(i);
List<News> nList = new ArrayList<News>();
for (int j = 0; j < newsList.size(); j++) {
News news = newsList.get(j);
if(news.getCid() - siteColumn.getId() == 0){
nList.add(news);
newsList.remove(j); //将已经加入Map的文章从newsList中移除,提高效率。同时j--。
j--;
continue;
}
}
//默认是按照时间倒序,但是v4.4以后,用户可以自定义,可以根据时间正序排序,如果不是默认的倒序的话,就需要重新排序
//这里是某个具体子栏目的排序,父栏目排序调整的在下面
if(siteColumn.getListRank() != null && siteColumn.getListRank() - SiteColumn.LIST_RANK_ADDTIME_ASC == 0 ){
Collections.sort(nList, new Comparator<News>() {
public int compare(News n1, News n2) {
//按照发布时间正序排序,发布时间越早,排列越靠前
return n1.getAddtime() - n2.getAddtime();
}
});
}
columnMap.put(siteColumn.getCodeName(), siteColumn);
columnNewsMap.put(siteColumn.getCodeName(), nList);
}
//对 newsDataList 网站文章的内容进行调整,调整为map key:newsData.id value:newsData.text
Map<Integer, NewsDataBean> newsDataMap = new HashMap<Integer, NewsDataBean>();
for (int i = 0; i < newsDataList.size(); i++) {
NewsData nd = newsDataList.get(i);
newsDataMap.put(nd.getId(), new NewsDataBean(nd));
}
/*
* 对栏目进行上下级初始化,找到哪个是父级栏目,哪些是子栏目。并可以根据栏目代码来获取父栏目下的自栏目。 获取栏目树
*/
Map<String, SiteColumnTreeVO> columnTreeMap = new HashMap<String, SiteColumnTreeVO>(); //栏目树,根据栏目id获取当前栏目,以及下级栏目
//首先,遍历父栏目,将最顶级栏目(一级栏目)拿出来
for (int i = 0; i < siteColumnList.size(); i++) {
SiteColumn siteColumn = siteColumnList.get(i);
//根据父栏目代码,判断是否有上级栏目,若没有的话,那就是顶级栏目了,将其加入栏目树
if(siteColumn.getParentCodeName() == null || siteColumn.getParentCodeName().length() == 0){
SiteColumnTreeVO scTree = new SiteColumnTreeVO();
scTree.setSiteColumn(siteColumn);
scTree.setList(new ArrayList<SiteColumnTreeVO>());
scTree.setLevel(1); //顶级栏目,1级栏目
columnTreeMap.put(siteColumn.getCodeName(), scTree);
}
}
//然后,再遍历父栏目,将二级栏目拿出来
for (int i = 0; i < siteColumnList.size(); i++) {
SiteColumn siteColumn = siteColumnList.get(i);
//判断是否有上级栏目,根据父栏目代码,如果有的话,那就是子栏目了,符合
if(siteColumn.getParentCodeName() != null && siteColumn.getParentCodeName().length() > 0){
SiteColumnTreeVO scTree = new SiteColumnTreeVO();
scTree.setSiteColumn(siteColumn);
scTree.setList(new ArrayList<SiteColumnTreeVO>());
scTree.setLevel(2); //子栏目,二级栏目
if(columnTreeMap.get(siteColumn.getParentCodeName()) != null){
columnTreeMap.get(siteColumn.getParentCodeName()).getList().add(scTree);
}else{
//没有找到该子栏目的父栏目
}
}
}
/*
* 栏目树取完了,接着进行对栏目树内,有子栏目的父栏目,进行信息汇总,将子栏目的信息列表,都合并起来,汇总成一个父栏目的
*/
//对文章-父栏目进行分类,以栏目codeName为key,将每个子栏目的文章加入进总的所属的父栏目的List中,然后进行排序
Map<String, List<com.xnx3.wangmarket.admin.bean.News>> columnTreeNewsMap = new HashMap<String, List<com.xnx3.wangmarket.admin.bean.News>>();
for (Map.Entry<String, SiteColumnTreeVO> entry : columnTreeMap.entrySet()) {
SiteColumnTreeVO sct = entry.getValue();
if(sct.getList().size() > 0){
//有子栏目,才会对其进行数据汇总
columnTreeNewsMap.put(sct.getSiteColumn().getCodeName(), new ArrayList<com.xnx3.wangmarket.admin.bean.News>());
//遍历其子栏目,将每个子栏目的News信息合并在一块,供父栏目直接调用
for (int i = 0; i < sct.getList().size(); i++) {
SiteColumnTreeVO subSct = sct.getList().get(i); //子栏目的栏目信息
//v4.7版本更新,增加判断,只有栏目类型是列表页面的,才会将子栏目的信息合并入父栏目。
if(subSct.getSiteColumn().getType() - SiteColumn.TYPE_LIST == 0){
//将该栏目的News文章,创建一个新的List
List<com.xnx3.wangmarket.admin.bean.News> nList = new ArrayList<com.xnx3.wangmarket.admin.bean.News>();
List<News> oList = columnNewsMap.get(subSct.getSiteColumn().getCodeName());
for (int j = 0; j < oList.size(); j++) {
com.xnx3.wangmarket.admin.bean.News n = new com.xnx3.wangmarket.admin.bean.News();
News news = oList.get(j);
n.setNews(news);
n.setRank(news.getId());
nList.add(n);
}
//将新的List,合并入父栏目CodeName的List
columnTreeNewsMap.get(sct.getSiteColumn().getCodeName()).addAll(nList);
}
}
}
}
//合并完后,对每个父栏目的List进行先后顺序排序
for (Map.Entry<String, SiteColumnTreeVO> entry : columnTreeMap.entrySet()) {
SiteColumnTreeVO sct = entry.getValue();
if(sct.getList().size() > 0){
// Collections.sort(columnTreeNewsMap.get(sct.getSiteColumn().getCodeName()));
Collections.sort(columnTreeNewsMap.get(sct.getSiteColumn().getCodeName()), new Comparator<com.xnx3.wangmarket.admin.bean.News>() {
public int compare(com.xnx3.wangmarket.admin.bean.News n1, com.xnx3.wangmarket.admin.bean.News n2) {
if(sct.getSiteColumn().getListRank() != null && sct.getSiteColumn().getListRank() - SiteColumn.LIST_RANK_ADDTIME_ASC == 0){
//按照发布时间正序排序,发布时间越早,排列越靠前
return n2.getNews().getAddtime() - n1.getNews().getAddtime();
}else{
//按照发布时间倒序排序,发布时间越晚,排列越靠前
return n1.getNews().getAddtime() - n2.getNews().getAddtime();
}
}
});
}
}
//排序完后,将其取出,加入columnNewsMap中,供模版中动态调用父栏目代码,就能直接拿到其的所有子栏目信息数据
for (Map.Entry<String, SiteColumnTreeVO> entry : columnTreeMap.entrySet()) {
SiteColumnTreeVO sct = entry.getValue();
if(sct.getList().size() > 0){
List<com.xnx3.wangmarket.admin.bean.News> nList = columnTreeNewsMap.get(sct.getSiteColumn().getCodeName());
for (int i = nList.size()-1; i >= 0 ; i--) {
columnNewsMap.get(sct.getSiteColumn().getCodeName()).add(nList.get(i).getNews());
}
}
}
/*
* sitemap.xml
*/
String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+ "<urlset\n"
+ "\txmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\"\n"
+ "\txmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n"
+ "\txsi:schemaLocation=\"http://www.sitemaps.org/schemas/sitemap/0.9\n"
+ "\t\thttp://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd\">\n";
//加入首页
String indexUrl = "http://"+Func.getDomain(site);
xml = xml + getSitemapUrl(indexUrl, "1.00");
/*
* 模版替换生成页面的步骤
* 1.替换模版变量的标签
* 1.1 通用标签
* 1.2 动态栏目调用标签 (这里要将动态调用标签最先替换。不然动态调用标签非常可能会生成列表,会增加替换通用标签的数量,所以先替换通用标签后,在替换动态模版调用标签)
* 2.替换列表页模版、详情页模版的标签
* 2.1 通用标签
* 2.2 动态栏目调用标签
* 2.3 模版变量装载
*
*
* 分支--1->生成首页
* 分支--2->生成栏目列表页、详情页
* 分支--2->生成sitemap.xml
*/
//1.替换模版变量的标签,并在替换完毕后将其加入Session缓存
// Map<String, String> varMap = new HashMap<String, String>();
// Map<String, String> varMap = Func.getUserBeanForShiroSession().getTemplateVarDataMapForOriginal();
//v2.24,将模版变量的比对,改为模版页面
TemplatePageListVO tplVO = templateService.getTemplatePageListByCache(request);
if(tplVO == null || tplVO.getList().size() == 0){
vo.setBaseVO(BaseVO.FAILURE, "当前网站尚未选择/导入/增加模版,生成失败!网站有模版后才能根据模版生成整站!");
return vo;
}
//当网站只有一个首页时,是不需要这个的。所以只需要上面的,判断一下是否有模版页就够了。 v2.24更新
if(Func.getUserBeanForShiroSession().getTemplateVarMapForOriginal() != null){ //v4.7加入,避免只有一个首页时,生成整站第一次报错
}
for (Map.Entry<String, TemplateVarVO> entry : Func.getUserBeanForShiroSession().getTemplateVarMapForOriginal().entrySet()) {
//替换公共标签
String v = template.replacePublicTag(entry.getValue().getTemplateVarData().getText());
//替换栏目的动态调用标签
v = template.replaceSiteColumnBlock(v, columnNewsMap, columnMap, columnTreeMap, true, null, newsDataMap);
Func.getUserBeanForShiroSession().getTemplateVarCompileDataMap().put(entry.getKey(), v);
}
/*
* 进行第二步,对列表页模版、详情页模版进行通用标签等的替换,将其处理好。等生成的时候,直接取出来替换news、column即可
*/
TemplatePageListVO tpl = templateService.getTemplatePageListByCache(request); //取缓存中的模版页列表
Map<String, String> templateCacheMap = new HashMap<String, String>(); //替换好通用标签等的模版都缓存入此。Map<templatePage.name, templatePageData.text>
for (int i = 0; i < tpl.getList().size(); i++) {
TemplatePageVO tpVO = tpl.getList().get(i);
String text = null; //模版页内容
if(tpVO.getTemplatePageData() == null){
//若缓存中没有缓存上模版页详情,那么从数据库中挨个取出来(暂时先这么做,反正RDS剩余。后续将执行单挑SQL将所有页面一块拿出来再分配)(并且取出来后,要加入缓存,之后在点击生成整站,就不用再去数据库取了)
TemplatePageData tpd = sqlDAO.findById(TemplatePageData.class, tpVO.getTemplatePage().getId());
if(tpd != null){
text = tpd.getText();
}
}else{
text = tpVO.getTemplatePageData().getText();
}
if(text == null){
vo.setBaseVO(BaseVO.FAILURE, "模版页"+tpVO.getTemplatePage().getName()+"的内容不存在!请先检查此模版页");
return vo;
}
//进行2.1、2.2、2.3预操作
//替换公共标签
text = template.replacePublicTag(text);
//替换栏目的动态调用标签
text = template.replaceSiteColumnBlock(text, columnNewsMap, columnMap, columnTreeMap, true, null, newsDataMap);
//装载模版变量
text = template.assemblyTemplateVar(text);
//预处理后,将其缓存入Map,等待生成页面时直接获取
templateCacheMap.put(tpVO.getTemplatePage().getName(), text);
}
//最后还要将获得到的内容缓存入Session,下次就不用去数据库取了
//生成首页
String indexHtml = templateCacheMap.get(templatePageIndexVO.getTemplatePage().getName());
//替换首页中存在的栏目的动态调用标签
indexHtml = template.replaceSiteColumnBlock(indexHtml, columnNewsMap, columnMap, columnTreeMap, true, null, newsDataMap);
indexHtml = template.replacePublicTag(indexHtml); //替换公共标签
//生成首页保存到OSS或本地盘
AttachmentFile.putStringFile("site/"+site.getId()+"/index.html", indexHtml);
/*
* 生成栏目、内容页面
*/
//遍历出所有列表栏目
for (SiteColumn siteColumn : columnMap.values()) {
if(siteColumn.getCodeName() == null || siteColumn.getCodeName().length() == 0){
vo.setBaseVO(BaseVO.FAILURE, "栏目["+siteColumn.getName()+"]的“栏目代码”不存在,请先为其设置栏目代码");
return vo;
}
//取得当前栏目下的News列表
List<News> columnNewsList = columnNewsMap.get(siteColumn.getCodeName());
//获取当前栏目的内容页模版
String viewTemplateHtml = templateCacheMap.get(siteColumn.getTemplatePageViewName());
if(viewTemplateHtml == null){
vo.setBaseVO(BaseVO.FAILURE, "栏目["+siteColumn.getName()+"]未绑定页面内容模版,请去绑定");
return vo;
}
//替换内容模版中的动态栏目调用(动态标签引用)
viewTemplateHtml = template.replaceSiteColumnBlock(viewTemplateHtml, columnNewsMap, columnMap, columnTreeMap, false, siteColumn, newsDataMap);
//如果是新闻或者图文列表,那么才会生成栏目列表页面
if(siteColumn.getType() - SiteColumn.TYPE_LIST == 0 || siteColumn.getType() - SiteColumn.TYPE_NEWS == 0 || siteColumn.getType() - SiteColumn.TYPE_IMAGENEWS == 0){
//当前栏目的列表模版
String listTemplateHtml = templateCacheMap.get(siteColumn.getTemplatePageListName());
if(listTemplateHtml == null){
vo.setBaseVO(BaseVO.FAILURE, "栏目["+siteColumn.getName()+"]未绑定模版列表页面,请去绑定");
return vo;
}
//替换列表模版中的动态栏目调用(动态标签引用)
listTemplateHtml = template.replaceSiteColumnBlock(listTemplateHtml, columnNewsMap, columnMap, columnTreeMap, false, siteColumn, newsDataMap);
//生成其列表页面
template.generateListHtmlForWholeSite(listTemplateHtml, siteColumn, columnNewsList, newsDataMap);
//XML加入栏目页面
xml = xml + getSitemapUrl(indexUrl+"/"+template.generateSiteColumnListPageHtmlName(siteColumn, 1)+".html", "0.4");
/*
* 生成当前栏目的内容页面
*/
//判断栏目属性中,是否设置了生成内容详情页面, v4.7增加
if(siteColumn.getUseGenerateView() == null || siteColumn.getUseGenerateView() - SiteColumn.USED_ENABLE == 0){
for (int i = 0; i < columnNewsList.size(); i++) {
News news = columnNewsList.get(i);
if(siteColumn.getId() - news.getCid() == 0){
//当前文章是此栏目的,那么生成文章详情。不然是不生成的,免得在父栏目中生成子栏目的页面,导致siteColumn调用出现错误
//列表页的内容详情页面,还会有上一篇、下一篇的功能
News upNews = null;
News nextNews = null;
if(i > 0){
upNews = columnNewsList.get(i-1);
}
if((i+1) < columnNewsList.size()){
nextNews = columnNewsList.get(i+1);
}
//生成内容页面
template.generateViewHtmlForTemplateForWholeSite(news, siteColumn, newsDataMap.get(news.getId()), viewTemplateHtml, upNews, nextNews);
//XML加入内容页面
xml = xml + getSitemapUrl(indexUrl+"/"+template.generateNewsPageHtmlName(siteColumn, news)+".html", "0.5");
}
}
}
}else if(siteColumn.getType() - SiteColumn.TYPE_ALONEPAGE == 0 || siteColumn.getType() - SiteColumn.TYPE_PAGE == 0){
//独立页面,只生成内容模版
if(siteColumn.getEditMode() - SiteColumn.EDIT_MODE_TEMPLATE == 0){
//模版式编辑,无 news , 则直接生成
template.generateViewHtmlForTemplateForWholeSite(null, siteColumn, new NewsDataBean(null), viewTemplateHtml, null, null);
//独立页面享有更大的权重,赋予其 0.8
xml = xml + getSitemapUrl(indexUrl+"/"+template.generateNewsPageHtmlName(siteColumn, null)+".html", "0.8");
}else{
//UEditor、输入模型编辑方式
for (int i = 0; i < columnNewsList.size(); i++) {
News news = columnNewsList.get(i);
template.generateViewHtmlForTemplateForWholeSite(news, siteColumn, newsDataMap.get(news.getId()), viewTemplateHtml, null, null);
//独立页面享有更大的权重,赋予其 0.8
xml = xml + getSitemapUrl(indexUrl+"/"+template.generateNewsPageHtmlName(siteColumn, news)+".html", "0.8");
}
}
}else{
//其他栏目不管,当然,也没有其他类型栏目了,v4.6版本更新后,CMS模式一共就这两种类型的
}
}
//生成 sitemap.xml
xml = xml + "</urlset>";
AttachmentFile.putStringFile("site/"+site.getId()+"/sitemap.xml", xml);
return new BaseVO();
}
#location 11
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void disconnect() throws InterruptedException,
ServiceLocatorException {
try {
synchronized (this) {
if (LOG.isLoggable(Level.FINE)) {
LOG.log(Level.FINE, "Start disconnect session");
}
blockedByRunUpOperation = true;
disconnect(false);
blockedByRunUpOperation = false;
if (LOG.isLoggable(Level.FINER)) {
LOG.log(Level.FINER, "End disconnect session");
}
}
} catch (InterruptedException e) {
blockedByRunUpOperation = false;
throw e;
} catch (ServiceLocatorException e) {
blockedByRunUpOperation = false;
throw e;
} catch (Exception e) {
blockedByRunUpOperation = false;
if (LOG.isLoggable(Level.SEVERE)) {
LOG.log(Level.SEVERE, "Connect not passed: " + e.getMessage());
}
}
}
|
#vulnerable code
public void disconnect() throws InterruptedException,
ServiceLocatorException {
disconnect(true, false);
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testReframe() throws Exception {
final ZMTPFramingDecoder decoder = new ZMTPFramingDecoder(wireFormat(ZMTP10), new RawDecoder());
final ZMTPWriter writer = ZMTPWriter.create(ZMTP10);
final ByteBuf buf = Unpooled.buffer();
writer.reset(buf);
// Request a frame with margin in anticipation of a larger payload...
// ... but write a smaller payload
final ByteBuf content = copiedBuffer("hello world", UTF_8);
writer.frame(content.readableBytes() * 2, true).writeBytes(content.duplicate());
// And rewrite the frame accordingly
writer.reframe(content.readableBytes(), false);
// Verify that the message can be parsed
decoder.decode(null, buf, out);
assertThat(out, hasSize(1));
assertThat(out, contains((Object) singletonList(content)));
// Write and verify another message
final ByteBuf next = copiedBuffer("next", UTF_8);
writer.frame(next.readableBytes(), false).writeBytes(next.duplicate());
out.clear();
decoder.decode(null, buf, out);
assertThat(out, hasSize(1));
assertThat(out, contains((Object) singletonList(next)));
}
|
#vulnerable code
@Test
public void testReframe() throws Exception {
final ZMTPParser parser = ZMTPParser.create(ZMTP10, new RawDecoder());
final ZMTPWriter writer = ZMTPWriter.create(ZMTP10);
final ByteBuf buf = Unpooled.buffer();
writer.reset(buf);
// Request a frame with margin in anticipation of a larger payload...
// ... but write a smaller payload
final ByteBuf content = copiedBuffer("hello world", UTF_8);
writer.frame(content.readableBytes() * 2, true).writeBytes(content.duplicate());
// And rewrite the frame accordingly
writer.reframe(content.readableBytes(), false);
// Verify that the message can be parsed
parser.parse(buf, out);
assertThat(out, hasSize(1));
assertThat(out, contains((Object) singletonList(content)));
// Write and verify another message
final ByteBuf next = copiedBuffer("next", UTF_8);
writer.frame(next.readableBytes(), false).writeBytes(next.duplicate());
out.clear();
parser.parse(buf, out);
assertThat(out, hasSize(1));
assertThat(out, contains((Object) singletonList(next)));
}
#location 3
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testOneFrame() throws Exception {
final ZMTPWriter writer = ZMTPWriter.create(ZMTP10);
final ByteBuf buf = Unpooled.buffer();
writer.reset(buf);
ByteBuf frame = writer.frame(11, false);
assertThat(frame, is(sameInstance(buf)));
final ByteBuf content = copiedBuffer("hello world", UTF_8);
frame.writeBytes(content.duplicate());
final ZMTPFramingDecoder decoder = new ZMTPFramingDecoder(wireFormat(ZMTP10), new RawDecoder());
decoder.decode(null, buf, out);
assertThat(out, hasSize(1));
assertThat(out, contains((Object) singletonList(content)));
}
|
#vulnerable code
@Test
public void testOneFrame() throws Exception {
final ZMTPWriter writer = ZMTPWriter.create(ZMTP10);
final ByteBuf buf = Unpooled.buffer();
writer.reset(buf);
ByteBuf frame = writer.frame(11, false);
assertThat(frame, is(sameInstance(buf)));
final ByteBuf content = copiedBuffer("hello world", UTF_8);
frame.writeBytes(content.duplicate());
final ZMTPParser parser = ZMTPParser.create(ZMTP10, new RawDecoder());
parser.parse(buf, out);
assertThat(out, hasSize(1));
assertThat(out, contains((Object) singletonList(content)));
}
#location 14
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testTwoFrames() throws Exception {
final ZMTPMessageDecoder decoder = new ZMTPMessageDecoder();
final ByteBuf f0 = Unpooled.copiedBuffer("hello", UTF_8);
final ByteBuf f1 = Unpooled.copiedBuffer("world", UTF_8);
final List<Object> out = Lists.newArrayList();
decoder.header(f0.readableBytes(), true, out);
decoder.content(f0, out);
decoder.header(f1.readableBytes(), false, out);
decoder.content(f1, out);
decoder.finish(out);
final Object expected = ZMTPMessage.fromUTF8(ALLOC, "hello", "world");
assertThat(out, hasSize(1));
assertThat(out, contains(expected));
}
|
#vulnerable code
@Test
public void testTwoFrames() throws Exception {
final ZMTPMessageDecoder decoder = new ZMTPMessageDecoder();
final ByteBuf f0 = Unpooled.copiedBuffer("hello", UTF_8);
final ByteBuf f1 = Unpooled.copiedBuffer("world", UTF_8);
final List<Object> out = Lists.newArrayList();
decoder.header(f0.readableBytes(), true, out);
decoder.content(f0, out);
decoder.header(f1.readableBytes(), false, out);
decoder.content(f1, out);
decoder.finish(out);
final Object expected = new ZMTPIncomingMessage(fromUTF8(ALLOC, "hello", "world"), false, 10);
assertThat(out, hasSize(1));
assertThat(out, contains(expected));
}
#location 13
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public final Cache buildCache(String name) throws CacheException {
if (log.isDebugEnabled()) {
log.debug("Loading a new EhCache cache named [" + name + "]");
}
try {
net.sf.ehcache.Cache cache = getCacheManager().getCache(name);
if (cache == null) {
if (log.isWarnEnabled()) {
log.warn("Could not find a specific ehcache configuration for cache named [" + name + "]; using defaults.");
}
if ( name.equals(DEFAULT_ACTIVE_SESSIONS_CACHE_NAME) ) {
if ( log.isInfoEnabled() ) {
log.info("Creating " + DEFAULT_ACTIVE_SESSIONS_CACHE_NAME + " cache with default JSecurity " +
"session cache settings." );
}
cache = buildDefaultActiveSessionsCache();
manager.addCache( cache );
} else {
manager.addCache( name );
cache = manager.getCache( name );
}
if (log.isDebugEnabled()) {
log.debug("Started EHCache named [" + name + "]");
}
}
return new EhCache(cache);
} catch (net.sf.ehcache.CacheException e) {
throw new CacheException(e);
}
}
|
#vulnerable code
public final Cache buildCache(String name) throws CacheException {
if (log.isDebugEnabled()) {
log.debug("Loading a new EhCache cache named [" + name + "]");
}
try {
net.sf.ehcache.Cache cache = getCacheManager().getCache(name);
if (cache == null) {
if (log.isWarnEnabled()) {
log.warn("Could not find a specific ehcache configuration for cache named [" + name + "]; using defaults.");
}
if ( name.equals(DEFAULT_ACTIVE_SESSIONS_CACHE_NAME) ) {
if ( log.isInfoEnabled() ) {
log.info("Creating " + DEFAULT_ACTIVE_SESSIONS_CACHE_NAME + " cache with default JSecurity " +
"session cache settings." );
}
cache = buildDefaultActiveSessionsCache();
manager.addCache( cache );
} else {
manager.addCache( name );
cache = manager.getCache( name );
}
cache.initialise();
if (log.isDebugEnabled()) {
log.debug("Started EHCache named [" + name + "]");
}
}
return new EhCache(cache);
} catch (net.sf.ehcache.CacheException e) {
throw new CacheException(e);
}
}
#location 24
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public static void bindPrincipalsToSessionIfNecessary( HttpServletRequest request ) {
SecurityContext ctx = (SecurityContext) ThreadContext.get( ThreadContext.SECURITY_CONTEXT_KEY );
if ( ctx != null ) {
Session session = ThreadLocalSecurityContext.current().getSession();
if( session != null && session.getAttribute( PRINCIPALS_SESSION_KEY) == null ) {
session.setAttribute( PRINCIPALS_SESSION_KEY, ctx.getAllPrincipals() );
} else {
HttpSession httpSession = request.getSession();
if( httpSession.getAttribute( PRINCIPALS_SESSION_KEY ) == null ) {
httpSession.setAttribute( PRINCIPALS_SESSION_KEY, ctx.getAllPrincipals() );
}
}
}
}
|
#vulnerable code
public static void bindPrincipalsToSessionIfNecessary( HttpServletRequest request ) {
SecurityContext ctx = (SecurityContext) ThreadContext.get( ThreadContext.SECURITY_CONTEXT_KEY );
if ( ctx != null ) {
Session session = ThreadLocalSecurityContext.current().getSession( false );
if( session != null && session.getAttribute( PRINCIPALS_SESSION_KEY) == null ) {
session.setAttribute( PRINCIPALS_SESSION_KEY, ctx.getAllPrincipals() );
} else {
HttpSession httpSession = request.getSession();
if( httpSession.getAttribute( PRINCIPALS_SESSION_KEY ) == null ) {
httpSession.setAttribute( PRINCIPALS_SESSION_KEY, ctx.getAllPrincipals() );
}
}
}
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public int onDoStartTag() throws JspException {
if ( getSecurityContext() != null && getSecurityContext().isAuthenticated() ) {
return TagSupport.EVAL_BODY_INCLUDE;
} else {
return TagSupport.SKIP_BODY;
}
}
|
#vulnerable code
public int onDoStartTag() throws JspException {
if ( getSecurityContext().isAuthenticated() ) {
return TagSupport.EVAL_BODY_INCLUDE;
} else {
return TagSupport.SKIP_BODY;
}
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void create( Session session ) {
Serializable id = session.getSessionId();
if ( id == null ) {
String msg = "session must be assigned an id. Please check assignId( Session s ) " +
"implementation.";
throw new IllegalStateException( msg );
}
if ( activeSessions.containsKey( id ) || stoppedSessions.containsKey( id ) ) {
String msg = "There is an existing session already created with session id [" +
id + "]. Session Id's must be unique.";
throw new IllegalArgumentException( msg );
}
synchronized ( activeSessions ) {
activeSessions.put( id, session );
}
}
|
#vulnerable code
public void create( Session session ) {
assignId( session );
Serializable id = session.getSessionId();
if ( id == null ) {
String msg = "session must be assigned an id. Please check assignId( Session s ) " +
"implementation.";
throw new IllegalStateException( msg );
}
if ( activeSessions.containsKey( id ) || stoppedSessions.containsKey( id ) ) {
String msg = "There is an existing session already created with session id [" +
id + "]. Session Id's must be unique.";
throw new IllegalArgumentException( msg );
}
synchronized ( activeSessions ) {
activeSessions.put( id, session );
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public int onDoStartTag() throws JspException {
String strValue = null;
if( getSecurityContext() != null && getSecurityContext().isAuthenticated() ) {
// Get the principal to print out
Principal principal;
if( type == null ) {
principal = getSecurityContext().getPrincipal();
} else {
principal = getSecurityContext().getPrincipalByType( type );
}
// Get the string value of the principal
if( principal != null ) {
if( property == null ) {
strValue = principal.toString();
} else {
strValue = getPrincipalProperty( principal, property );
}
}
}
// Print out the principal value if not null
if( strValue != null ) {
try {
pageContext.getOut().write( strValue );
} catch (IOException e) {
throw new JspTagException( "Error writing [" + strValue + "] to JSP.", e );
}
}
return SKIP_BODY;
}
|
#vulnerable code
public int onDoStartTag() throws JspException {
String strValue = null;
if( getSecurityContext().isAuthenticated() ) {
// Get the principal to print out
Principal principal;
if( type == null ) {
principal = getSecurityContext().getPrincipal();
} else {
principal = getSecurityContext().getPrincipalByType( type );
}
// Get the string value of the principal
if( principal != null ) {
if( property == null ) {
strValue = principal.toString();
} else {
strValue = getPrincipalProperty( principal, property );
}
}
}
// Print out the principal value if not null
if( strValue != null ) {
try {
pageContext.getOut().write( strValue );
} catch (IOException e) {
throw new JspTagException( "Error writing [" + strValue + "] to JSP.", e );
}
}
return SKIP_BODY;
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected boolean showTagBody( Permission p ) {
boolean permitted = getSecurityContext() != null && getSecurityContext().implies( p );
return !permitted;
}
|
#vulnerable code
protected boolean showTagBody( Permission p ) {
boolean permitted = getSecurityContext().implies( p );
return !permitted;
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void send( AuthenticationEvent event ) {
if ( listeners != null && !listeners.isEmpty() ) {
for ( AuthenticationEventListener ael : listeners ) {
ael.onEvent( event );
}
} else {
if ( log.isWarnEnabled() ) {
String msg = "internal listeners collection is null. No " +
"AuthenticationEventListeners will be notified of event [" +
event + "]";
log.warn( msg );
}
}
}
|
#vulnerable code
public void send( AuthenticationEvent event ) {
if ( listeners != null && !listeners.isEmpty() ) {
synchronized ( listeners ) {
for ( AuthenticationEventListener ael : listeners ) {
if ( event instanceof SuccessfulAuthenticationEvent) {
ael.accountAuthenticated( event );
} else if ( event instanceof UnlockedAccountEvent) {
ael.accountUnlocked( event );
} else if ( event instanceof LogoutEvent) {
ael.accountLoggedOut( event );
} else if ( event instanceof FailedAuthenticationEvent) {
FailedAuthenticationEvent failedEvent = (FailedAuthenticationEvent)event;
AuthenticationException cause = failedEvent.getCause();
if ( cause != null && ( cause instanceof LockedAccountException ) ) {
ael.accountLocked( event );
} else {
ael.authenticationFailed( event );
}
} else {
String msg = "Received argument of type [" + event.getClass() + "]. This " +
"implementation can only send event instances of types " +
SuccessfulAuthenticationEvent.class.getName() + ", " +
FailedAuthenticationEvent.class.getName() + ", " +
UnlockedAccountEvent.class.getName() + ", or " +
LogoutEvent.class.getName();
throw new IllegalArgumentException( msg );
}
}
}
} else {
if ( log.isWarnEnabled() ) {
String msg = "internal listeners collection is null. No " +
"AuthenticationEventListeners will be notified of event [" +
event + "]";
log.warn( msg );
}
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@SuppressWarnings({"unchecked"})
public boolean isAccessAllowed(ServletRequest request, ServletResponse response, Object mappedValue) throws IOException {
Subject subject = getSubject(request, response);
String[] rolesArray = (String[]) mappedValue;
if (rolesArray == null || rolesArray.length == 0) {
//no roles specified, so nothing to check - allow access.
return true;
}
Set<String> roles = CollectionUtils.asSet(rolesArray);
return subject.hasAllRoles(roles);
}
|
#vulnerable code
@SuppressWarnings({"unchecked"})
public boolean isAccessAllowed(ServletRequest request, ServletResponse response, Object mappedValue) throws IOException {
Subject subject = getSubject(request, response);
Set<String> roles = (Set<String>) mappedValue;
boolean hasRoles = true;
if (roles != null && !roles.isEmpty()) {
if (roles.size() == 1) {
if (!subject.hasRole(roles.iterator().next())) {
hasRoles = false;
}
} else {
if (!subject.hasAllRoles(roles)) {
hasRoles = false;
}
}
}
return hasRoles;
}
#location 10
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void send( SessionEvent event ) {
if ( listeners != null && !listeners.isEmpty() ) {
for( SessionEventListener sel : listeners ) {
sel.onEvent( event );
}
}
}
|
#vulnerable code
public void send( SessionEvent event ) {
synchronized( listeners ) {
for( SessionEventListener sel : listeners ) {
if ( event instanceof StartedSessionEvent) {
sel.sessionStarted( event );
} else if ( event instanceof ExpiredSessionEvent) {
sel.sessionExpired( event );
} else if ( event instanceof StoppedSessionEvent) {
sel.sessionStopped( event );
} else {
String msg = "Received argument of type [" + event.getClass() + "]. This " +
"implementation can only send event instances of types " +
StartedSessionEvent.class.getName() + ", " +
ExpiredSessionEvent.class.getName() + ", or " +
StoppedSessionEvent.class.getName();
throw new IllegalArgumentException( msg );
}
}
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@SuppressWarnings({"unchecked"})
public boolean isAccessAllowed(ServletRequest request, ServletResponse response, Object mappedValue) throws IOException {
Subject subject = getSubject(request, response);
String[] rolesArray = (String[]) mappedValue;
if (rolesArray == null || rolesArray.length == 0) {
//no roles specified, so nothing to check - allow access.
return true;
}
Set<String> roles = CollectionUtils.asSet(rolesArray);
return subject.hasAllRoles(roles);
}
|
#vulnerable code
@SuppressWarnings({"unchecked"})
public boolean isAccessAllowed(ServletRequest request, ServletResponse response, Object mappedValue) throws IOException {
Subject subject = getSubject(request, response);
Set<String> roles = (Set<String>) mappedValue;
boolean hasRoles = true;
if (roles != null && !roles.isEmpty()) {
if (roles.size() == 1) {
if (!subject.hasRole(roles.iterator().next())) {
hasRoles = false;
}
} else {
if (!subject.hasAllRoles(roles)) {
hasRoles = false;
}
}
}
return hasRoles;
}
#location 14
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
protected boolean showTagBody( String roleName ) {
return getSecurityContext() != null && getSecurityContext().hasRole( roleName );
}
|
#vulnerable code
protected boolean showTagBody( String roleName ) {
return getSecurityContext().hasRole( roleName );
}
#location 2
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Test
public void testDefaultConfig() {
Subject subject = sm.getSubject();
AuthenticationToken token = new UsernamePasswordToken("guest", "guest");
subject.login(token);
assertTrue(subject.isAuthenticated());
assertTrue("guest".equals(subject.getPrincipal()));
assertTrue(subject.hasRole("guest"));
Session session = subject.getSession();
session.setAttribute("key", "value");
assertEquals(session.getAttribute("key"), "value");
subject.logout();
assertNull(subject.getSession(false));
assertNull(subject.getPrincipal());
assertNull(subject.getPrincipals());
}
|
#vulnerable code
@Test
public void testDefaultConfig() {
Subject subject = SecurityUtils.getSubject();
AuthenticationToken token = new UsernamePasswordToken("guest", "guest");
subject.login(token);
assertTrue(subject.isAuthenticated());
assertTrue("guest".equals(subject.getPrincipal()));
assertTrue(subject.hasRole("guest"));
Session session = subject.getSession();
session.setAttribute("key", "value");
assertEquals(session.getAttribute("key"), "value");
subject.logout();
assertNull(subject.getSession(false));
assertNull(subject.getPrincipal());
assertNull(subject.getPrincipals());
}
#location 6
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
DbAction.Insert<?> createInsert(String propertyName, Object value, @Nullable Object key) {
DbAction.Insert<Object> insert = new DbAction.Insert<>(value,
context.getPersistentPropertyPath(propertyName, DummyEntity.class), rootInsert);
insert.getQualifiers().put(toPath(propertyName), key);
return insert;
}
|
#vulnerable code
DbAction.Insert<?> createDeepInsert(String propertyName, Object value, Object key,
@Nullable DbAction.Insert<?> parentInsert) {
PersistentPropertyPath<RelationalPersistentProperty> propertyPath = toPath(parentInsert.getPropertyPath().toDotPath() + "." + propertyName);
DbAction.Insert<Object> insert = new DbAction.Insert<>(value, propertyPath, parentInsert);
insert.getQualifiers().put(propertyPath, key);
return insert;
}
#location 4
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void load() {
Optional<String> region = loadRegion();
if (region.isPresent()) {
this.region = region.get();
} else {
this.region = DEFAULT_REGION;
LOG.warn("Could not load region configuration. Please ensure AWS CLI is " +
"configured via 'aws configure'. Will use default region of " + this.region);
}
}
|
#vulnerable code
public void load() {
String home = System.getProperty("user.home");
Properties awsConfigProperties = new Properties();
try {
// todo: use default profile
awsConfigProperties.load(new FileInputStream(home + "/.aws/config"));
} catch (IOException e) {
throw new RuntimeException("Could not load configuration. Please run 'aws configure'");
}
String region = awsConfigProperties.getProperty("region");
if (region != null) {
this.region = region;
} else {
LOG.warn("Could not load region configuration. Please ensure AWS CLI is configured via 'aws configure'. Will use default region of " + this.region);
}
}
#location 8
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
SegmentView getActiveSegmentView() { return segmentlist.get(segmentlist.size()-1); }
|
#vulnerable code
EntryLocation getLocationForOffset(long offset)
{
EntryLocation ret = new EntryLocation();
SegmentView sv = this.getSegmentForOffset(offset);
long reloff = offset - sv.startoff;
//select the group using a simple modulo mapping function
int gnum = (int)(reloff%sv.numgroups);
ret.group = sv.groups[gnum];
ret.relativeOff = reloff/sv.numgroups + ret.group.localstartoff;
log.info("location({}): seg.startOff={} gnum={} group-startOff={} relativeOff={} ",
offset,
sv.startoff,
gnum,
ret.group.localstartoff, ret.relativeOff);
return ret;
}
#location 5
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void testTwitterAlbums() throws IOException {
if (!DOWNLOAD_CONTENT) {
return;
}
List<URL> contentURLs = new ArrayList<URL>();
//contentURLs.add(new URL("https://twitter.com/danngamber01/media"));
contentURLs.add(new URL("https://twitter.com/search?q=from%3Apurrbunny%20filter%3Aimages&src=typd"));
for (URL url : contentURLs) {
try {
TwitterRipper ripper = new TwitterRipper(url);
ripper.rip();
assert(ripper.getWorkingDir().listFiles().length > 1);
deleteDir(ripper.getWorkingDir());
} catch (Exception e) {
e.printStackTrace();
fail("Error while ripping URL " + url + ": " + e.getMessage());
}
}
}
|
#vulnerable code
public void testTwitterAlbums() throws IOException {
List<URL> contentURLs = new ArrayList<URL>();
//contentURLs.add(new URL("https://twitter.com/danngamber01/media"));
contentURLs.add(new URL("https://twitter.com/search?q=from%3Apurrbunny%20filter%3Aimages&src=typd"));
for (URL url : contentURLs) {
try {
TwitterRipper ripper = new TwitterRipper(url);
ripper.rip();
assert(ripper.getWorkingDir().listFiles().length > 1);
deleteDir(ripper.getWorkingDir());
} catch (Exception e) {
e.printStackTrace();
fail("Error while ripping URL " + url + ": " + e.getMessage());
}
}
}
#location 9
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private static List<Constructor<?>> getRipperConstructors() throws Exception {
List<Constructor<?>> constructors = new ArrayList<Constructor<?>>();
for (Class<?> clazz : getClassesForPackage("com.rarchives.ripme.ripper.rippers")) {
if (AbstractRipper.class.isAssignableFrom(clazz)) {
constructors.add( (Constructor<?>) clazz.getConstructor(URL.class) );
}
}
return constructors;
}
|
#vulnerable code
private static List<Constructor<?>> getRipperConstructors() throws Exception {
List<Constructor<?>> constructors = new ArrayList<Constructor<?>>();
String rippersPackage = "com.rarchives.ripme.ripper.rippers";
ClassLoader cl = Thread.currentThread().getContextClassLoader();
Enumeration<URL> urls = cl.getResources(rippersPackage.replaceAll("\\.", "/"));
if (!urls.hasMoreElements()) {
return constructors;
}
URL classURL = urls.nextElement();
for (File f : new File(classURL.toURI()).listFiles()) {
String className = f.getName();
if (!className.endsWith(".class")
|| className.contains("$")
|| className.endsWith("Test.class")) {
// Ignore non-class or nested classes.
continue;
}
className = className.substring(0, className.length() - 6); // Strip .class
String fqname = rippersPackage + "." + className;
Class<?> clazz = Class.forName(fqname);
constructors.add( (Constructor<?>) clazz.getConstructor(URL.class));
}
return constructors;
}
#location 10
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void testXvideosRipper() throws IOException {
if (!DOWNLOAD_CONTENT) {
return;
}
List<URL> contentURLs = new ArrayList<URL>();
contentURLs.add(new URL("http://www.xvideos.com/video1428195/stephanie_first_time_anal"));
contentURLs.add(new URL("http://www.xvideos.com/video7136868/vid-20140205-wa0011"));
for (URL url : contentURLs) {
try {
XvideosRipper ripper = new XvideosRipper(url);
ripper.rip();
assert(ripper.getWorkingDir().listFiles().length > 1);
deleteDir(ripper.getWorkingDir());
} catch (Exception e) {
e.printStackTrace();
fail("Error while ripping URL " + url + ": " + e.getMessage());
}
}
}
|
#vulnerable code
public void testXvideosRipper() throws IOException {
if (false && !DOWNLOAD_CONTENT) {
return;
}
List<URL> contentURLs = new ArrayList<URL>();
contentURLs.add(new URL("http://www.xvideos.com/video1428195/stephanie_first_time_anal"));
contentURLs.add(new URL("http://www.xvideos.com/video7136868/vid-20140205-wa0011"));
for (URL url : contentURLs) {
try {
XvideosRipper ripper = new XvideosRipper(url);
ripper.rip();
assert(ripper.getWorkingDir().listFiles().length > 1);
deleteDir(ripper.getWorkingDir());
} catch (Exception e) {
e.printStackTrace();
fail("Error while ripping URL " + url + ": " + e.getMessage());
}
}
}
#location 12
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void downloadProblem(URL url, String message) {
if (observer == null) {
return;
}
synchronized(observer) {
itemsPending.remove(url);
itemsErrored.put(url, message);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " : " + message));
observer.notifyAll();
}
checkIfComplete();
}
|
#vulnerable code
public void downloadProblem(URL url, String message) {
if (observer == null) {
return;
}
synchronized(observer) {
itemsPending.remove(url);
itemsErrored.put(url, message);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " : " + message));
observer.notifyAll();
checkIfComplete();
}
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void testRedditAlbums() throws IOException {
if (!DOWNLOAD_CONTENT) {
return;
}
List<URL> contentURLs = new ArrayList<URL>();
//contentURLs.add(new URL("http://www.reddit.com/r/nsfw_oc"));
//contentURLs.add(new URL("http://www.reddit.com/r/nsfw_oc/top?t=all"));
//contentURLs.add(new URL("http://www.reddit.com/u/gingerpuss"));
contentURLs.add(new URL("http://www.reddit.com/r/UnrealGirls/comments/1ziuhl/in_class_veronique_popa/"));
for (URL url : contentURLs) {
try {
RedditRipper ripper = new RedditRipper(url);
ripper.rip();
assert(ripper.getWorkingDir().listFiles().length > 1);
deleteDir(ripper.getWorkingDir());
} catch (Exception e) {
e.printStackTrace();
fail("Error while ripping URL " + url + ": " + e.getMessage());
}
}
}
|
#vulnerable code
public void testRedditAlbums() throws IOException {
if (false && !DOWNLOAD_CONTENT) {
return;
}
List<URL> contentURLs = new ArrayList<URL>();
//contentURLs.add(new URL("http://www.reddit.com/r/nsfw_oc"));
//contentURLs.add(new URL("http://www.reddit.com/r/nsfw_oc/top?t=all"));
//contentURLs.add(new URL("http://www.reddit.com/u/gingerpuss"));
contentURLs.add(new URL("http://www.reddit.com/r/UnrealGirls/comments/1ziuhl/in_class_veronique_popa/"));
for (URL url : contentURLs) {
try {
RedditRipper ripper = new RedditRipper(url);
ripper.rip();
assert(ripper.getWorkingDir().listFiles().length > 1);
deleteDir(ripper.getWorkingDir());
} catch (Exception e) {
e.printStackTrace();
fail("Error while ripping URL " + url + ": " + e.getMessage());
}
}
}
#location 14
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
private void login() throws IOException {
try {
String dACookies = Utils.getConfigString(utilsKey, null);
this.cookies = dACookies != null ? deserialize(dACookies) : null;
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
if (this.cookies == null) {
LOGGER.info("Log in now");
// Do login now
// Load login page
Response res = Http.url("https://www.deviantart.com/users/login").connection().method(Method.GET)
.referrer(referer).userAgent(userAgent).execute();
// Find tokens
Document doc = res.parse();
Element form = doc.getElementById("login");
String token = form.select("input[name=\"validate_token\"]").first().attr("value");
String key = form.select("input[name=\"validate_key\"]").first().attr("value");
LOGGER.info("Token: " + token + " & Key: " + key);
// Build Login Data
HashMap<String, String> loginData = new HashMap<String, String>();
loginData.put("challenge", "");
loginData.put("username", username);
loginData.put("password", password);
loginData.put("remember_me", "1");
loginData.put("validate_token", token);
loginData.put("validate_key", key);
Map<String, String> cookies = res.cookies();
// Log in using data. Handle redirect
res = Http.url("https://www.deviantart.com/users/login").connection().referrer(referer).userAgent(userAgent)
.method(Method.POST).data(loginData).cookies(cookies).followRedirects(false).execute();
this.cookies = res.cookies();
res = Http.url(res.header("location")).connection().referrer(referer).userAgent(userAgent)
.method(Method.GET).cookies(cookies).followRedirects(false).execute();
// Store cookies
updateCookie(res.cookies());
// Apply agegate
this.cookies.put("agegate_state", "1");
// Write Cookie to file for other RipMe Instances or later use
Utils.setConfigString(utilsKey, serialize(new HashMap<String, String>(this.cookies)));
Utils.saveConfig(); // save now because of other instances that might work simultaneously
}
LOGGER.info("DA Cookies: " + this.cookies);
}
|
#vulnerable code
private void login() throws IOException {
File f = new File("DACookie.toDelete");
if (!f.exists()) {
f.createNewFile();
f.deleteOnExit();
// Load login page
Response res = Http.url("https://www.deviantart.com/users/login").connection().method(Method.GET)
.referrer(referer).userAgent(userAgent).execute();
// Find tokens
Document doc = res.parse();
Element form = doc.getElementById("login");
String token = form.select("input[name=\"validate_token\"]").first().attr("value");
String key = form.select("input[name=\"validate_key\"]").first().attr("value");
System.out.println(
"------------------------------" + token + " & " + key + "------------------------------");
// Build Login Data
HashMap<String, String> loginData = new HashMap<String, String>();
loginData.put("challenge", "");
loginData.put("username", username);
loginData.put("password", password);
loginData.put("remember_me", "1");
loginData.put("validate_token", token);
loginData.put("validate_key", key);
Map<String, String> cookies = res.cookies();
// Log in using data. Handle redirect
res = Http.url("https://www.deviantart.com/users/login").connection().referrer(referer).userAgent(userAgent)
.method(Method.POST).data(loginData).cookies(cookies).followRedirects(false).execute();
this.cookies = res.cookies();
res = Http.url(res.header("location")).connection().referrer(referer).userAgent(userAgent)
.method(Method.GET).cookies(cookies).followRedirects(false).execute();
// Store cookies
updateCookie(res.cookies());
// Apply agegate
this.cookies.put("agegate_state", "1");
// Write Cookie to file for other RipMe Instances
try {
FileOutputStream fileOut = new FileOutputStream(f);
ObjectOutputStream out = new ObjectOutputStream(fileOut);
out.writeObject(this.cookies);
out.close();
fileOut.close();
} catch (IOException i) {
i.printStackTrace();
}
} else {
// When cookie file already exists (from another RipMe instance)
while (this.cookies == null) {
try {
Thread.sleep(2000);
FileInputStream fileIn = new FileInputStream(f);
ObjectInputStream in = new ObjectInputStream(fileIn);
this.cookies = (Map<String, String>) in.readObject();
in.close();
fileIn.close();
} catch (IOException | ClassNotFoundException | InterruptedException i) {
i.printStackTrace();
}
}
}
System.out.println("------------------------------" + this.cookies + "------------------------------");
}
#location 66
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
for (Element el : doc.select("a.image-container > img")) {
String imageSource = el.attr("src");
// We remove the .md from images so we download the full size image
// not the medium ones
imageSource = imageSource.replace(".md", "");
result.add(imageSource);
}
return result;
}
|
#vulnerable code
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
Document userpage_doc;
// We check for the following string to see if this is a user page or not
if (doc.toString().contains("content=\"gallery\"")) {
for (Element elem : doc.select("a.image-container")) {
String link = elem.attr("href");
logger.info("Grabbing album " + link);
try {
userpage_doc = Http.url(link).get();
} catch(IOException e){
logger.warn("Failed to log link in Jsoup");
userpage_doc = null;
e.printStackTrace();
}
for (Element element : userpage_doc.select("a.image-container > img")) {
String imageSource = element.attr("src");
logger.info("Found image " + link);
// We remove the .md from images so we download the full size image
// not the medium ones
imageSource = imageSource.replace(".md", "");
result.add(imageSource);
}
}
}
else {
for (Element el : doc.select("a.image-container > img")) {
String imageSource = el.attr("src");
// We remove the .md from images so we download the full size image
// not the medium ones
imageSource = imageSource.replace(".md", "");
result.add(imageSource);
}
}
return result;
}
#location 17
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
// Fetches the image page at `url` (reusing session cookies), extracts the
// artist description HTML, and pairs it with the base name of the full-size
// image found on `page`. Returns null in test mode or on any IOException.
public String[] getDescription(String url,Document page) {
    if (isThisATest()) {
        return null;
    }
    try {
        // Fetch the image page
        Response resp = Http.url(url)
                .referrer(this.url)
                .cookies(cookies)
                .response();
        cookies.putAll(resp.cookies());
        // Try to find the description
        Document documentz = resp.parse();
        Element ele = documentz.select("div.dev-description").first();
        if (ele == null) {
            throw new IOException("No description found");
        }
        documentz.outputSettings(new Document.OutputSettings().prettyPrint(false));
        // Preserve line breaks as literal \n markers before cleaning the HTML.
        ele.select("br").append("\\n");
        ele.select("p").prepend("\\n\\n");
        String fullSize = null;
        Element thumb = page.select("div.zones-container span.thumb[href=\"" + url + "\"]").get(0);
        if (!thumb.attr("data-super-full-img").isEmpty()) {
            fullSize = thumb.attr("data-super-full-img");
            String[] split = fullSize.split("/");
            fullSize = split[split.length - 1];
        } else {
            String spanUrl = thumb.attr("href");
            fullSize = jsonToImage(page,spanUrl.substring(spanUrl.lastIndexOf('-') + 1));
            // jsonToImage can return null; only split when a URL was resolved.
            if (fullSize != null) {
                String[] split = fullSize.split("/");
                fullSize = split[split.length - 1];
            }
        }
        if (fullSize == null) {
            // No full-size name resolved: return the cleaned description alone.
            return new String[] {Jsoup.clean(ele.html().replaceAll("\\\\n", System.getProperty("line.separator")), "", Whitelist.none(), new Document.OutputSettings().prettyPrint(false))};
        }
        // Strip the file extension so only the base image name accompanies the text.
        fullSize = fullSize.substring(0, fullSize.lastIndexOf("."));
        return new String[] {Jsoup.clean(ele.html().replaceAll("\\\\n", System.getProperty("line.separator")), "", Whitelist.none(), new Document.OutputSettings().prettyPrint(false)),fullSize};
        // TODO Make this not make a newline if someone just types \n into the description.
    } catch (IOException ioe) {
        logger.info("Failed to get description at " + url + ": '" + ioe.getMessage() + "'");
        return null;
    }
}
|
#vulnerable code
@Override
// NOTE(review): flagged NULL_DEREFERENCE. In the else-branch below,
// jsonToImage(...) may return null (the patched variant null-checks it)
// yet fullSize.split("/") is called unconditionally. Also note the page
// is parsed twice (resp.parse() is invoked two times).
public String[] getDescription(String url,Document page) {
    if (isThisATest()) {
        return null;
    }
    try {
        // Fetch the image page
        Response resp = Http.url(url)
                .referrer(this.url)
                .cookies(cookies)
                .response();
        cookies.putAll(resp.cookies());
        // Try to find the description
        Elements els = resp.parse().select("div[class=dev-description]");
        if (els.size() == 0) {
            throw new IOException("No description found");
        }
        Document documentz = resp.parse();
        Element ele = documentz.select("div[class=dev-description]").get(0);
        documentz.outputSettings(new Document.OutputSettings().prettyPrint(false));
        ele.select("br").append("\\n");
        ele.select("p").prepend("\\n\\n");
        String fullSize = null;
        Element thumb = page.select("div.zones-container span.thumb[href=\"" + url + "\"]").get(0);
        if (!thumb.attr("data-super-full-img").isEmpty()) {
            fullSize = thumb.attr("data-super-full-img");
            String[] split = fullSize.split("/");
            fullSize = split[split.length - 1];
        } else {
            String spanUrl = thumb.attr("href");
            fullSize = jsonToImage(page,spanUrl.substring(spanUrl.lastIndexOf('-') + 1));
            // NOTE(review): fullSize may be null here -> potential NPE on split().
            String[] split = fullSize.split("/");
            fullSize = split[split.length - 1];
        }
        if (fullSize == null) {
            return new String[] {Jsoup.clean(ele.html().replaceAll("\\\\n", System.getProperty("line.separator")), "", Whitelist.none(), new Document.OutputSettings().prettyPrint(false))};
        }
        fullSize = fullSize.substring(0, fullSize.lastIndexOf("."));
        return new String[] {Jsoup.clean(ele.html().replaceAll("\\\\n", System.getProperty("line.separator")), "", Whitelist.none(), new Document.OutputSettings().prettyPrint(false)),fullSize};
        // TODO Make this not make a newline if someone just types \n into the description.
    } catch (IOException ioe) {
        logger.info("Failed to get description " + page + " : '" + ioe.getMessage() + "'");
        return null;
    }
}
#location 28
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Downloads `url` into `saveAs`, with retry (up to `retries`), optional resume
// via a Range header, redirect following, cookie/referrer propagation, and
// (when getFileExtFromMIME) extension detection from the MIME type or magic
// bytes. Reports progress and outcome to `observer`. Skips the download when
// the file (or a fuzzy-matched name) already exists and overwrite is disabled.
public void run() {
    long fileSize = 0;
    int bytesTotal = 0;
    int bytesDownloaded = 0;
    // When resuming, seed the Range offset from the partial file's length.
    if (saveAs.exists() && observer.tryResumeDownload()) {
        fileSize = saveAs.length();
    }
    try {
        observer.stopCheck();
    } catch (IOException e) {
        observer.downloadErrored(url, "Download interrupted");
        return;
    }
    // fuzzyExists also catches files saved under a near-matching name.
    if (saveAs.exists() && !observer.tryResumeDownload() || Utils.fuzzyExists(new File(saveAs.getParent()), saveAs.getName())) {
        if (Utils.getConfigBoolean("file.overwrite", false)) {
            logger.info("[!] Deleting existing file" + prettySaveAs);
            saveAs.delete();
        } else {
            logger.info("[!] Skipping " + url + " -- file already exists: " + prettySaveAs);
            observer.downloadExists(url, saveAs);
            return;
        }
    }
    URL urlToDownload = this.url;
    boolean redirected = false;
    int tries = 0; // Number of attempts to download
    do {
        tries += 1;
        InputStream bis = null; OutputStream fos = null;
        try {
            logger.info("    Downloading file: " + urlToDownload + (tries > 0 ? " Retry #" + tries : ""));
            observer.sendUpdate(STATUS.DOWNLOAD_STARTED, url.toExternalForm());
            // Setup HTTP request
            HttpURLConnection huc;
            if (this.url.toString().startsWith("https")) {
                huc = (HttpsURLConnection) urlToDownload.openConnection();
            }
            else {
                huc = (HttpURLConnection) urlToDownload.openConnection();
            }
            huc.setInstanceFollowRedirects(true);
            huc.setConnectTimeout(TIMEOUT);
            huc.setRequestProperty("accept", "*/*");
            if (!referrer.equals("")) {
                huc.setRequestProperty("Referer", referrer); // Sic
            }
            huc.setRequestProperty("User-agent", AbstractRipper.USER_AGENT);
            // Flatten the cookie map into a single header value.
            String cookie = "";
            for (String key : cookies.keySet()) {
                if (!cookie.equals("")) {
                    cookie += "; ";
                }
                cookie += key + "=" + cookies.get(key);
            }
            huc.setRequestProperty("Cookie", cookie);
            if (observer.tryResumeDownload()) {
                if (fileSize != 0) {
                    huc.setRequestProperty("Range", "bytes=" + fileSize + "-");
                }
            }
            logger.debug("Request properties: " + huc.getRequestProperties());
            huc.connect();
            int statusCode = huc.getResponseCode();
            logger.debug("Status code: " + statusCode);
            if (statusCode != 206 && observer.tryResumeDownload() && saveAs.exists()) {
                // TODO find a better way to handle servers that don't support resuming downloads then just erroring out
                throw new IOException("Server doesn't support resuming downloads");
            }
            if (statusCode / 100 == 3) { // 3xx Redirect
                if (!redirected) {
                    // Don't increment retries on the first redirect
                    tries--;
                    redirected = true;
                }
                String location = huc.getHeaderField("Location");
                urlToDownload = new URL(location);
                // Throw exception so download can be retried
                throw new IOException("Redirect status code " + statusCode + " - redirect to " + location);
            }
            if (statusCode / 100 == 4) { // 4xx errors
                logger.error("[!] Non-retriable status code " + statusCode + " while downloading from " + url);
                observer.downloadErrored(url, "Non-retriable status code " + statusCode + " while downloading " + url.toExternalForm());
                return; // Not retriable, drop out.
            }
            if (statusCode / 100 == 5) { // 5xx errors
                observer.downloadErrored(url, "Retriable status code " + statusCode + " while downloading " + url.toExternalForm());
                // Throw exception so download can be retried
                throw new IOException("Retriable status code " + statusCode);
            }
            if (huc.getContentLength() == 503 && urlToDownload.getHost().endsWith("imgur.com")) {
                // Imgur image with 503 bytes is "404"
                logger.error("[!] Imgur image is 404 (503 bytes long): " + url);
                observer.downloadErrored(url, "Imgur image is 404: " + url.toExternalForm());
                return;
            }
            // If the ripper is using the bytes progress bar set bytesTotal to huc.getContentLength()
            if (observer.useByteProgessBar()) {
                bytesTotal = huc.getContentLength();
                observer.setBytesTotal(bytesTotal);
                observer.sendUpdate(STATUS.TOTAL_BYTES, bytesTotal);
                logger.debug("Size of file at " + this.url + " = " + bytesTotal + "b");
            }
            // Save file
            bis = new BufferedInputStream(huc.getInputStream());
            // Check if we should get the file ext from the MIME type
            if (getFileExtFromMIME) {
                String fileExt = URLConnection.guessContentTypeFromStream(bis);
                if (fileExt != null) {
                    fileExt = fileExt.replaceAll("image/", "");
                    saveAs = new File(saveAs.toString() + "." + fileExt);
                } else {
                    logger.error("Was unable to get content type from stream");
                    // Try to get the file type from the magic number
                    byte[] magicBytes = new byte[8];
                    bis.read(magicBytes,0, 5);
                    bis.reset();
                    fileExt = Utils.getEXTFromMagic(magicBytes);
                    if (fileExt != null) {
                        saveAs = new File(saveAs.toString() + "." + fileExt);
                    } else {
                        logger.error("Was unable to get content type using magic number");
                        logger.error("Magic number was: " + Arrays.toString(magicBytes));
                    }
                }
            }
            // If we're resuming a download we append data to the existing file
            if (statusCode == 206) {
                fos = new FileOutputStream(saveAs, true);
            } else {
                fos = new FileOutputStream(saveAs);
            }
            byte[] data = new byte[1024 * 256];
            int bytesRead;
            while ( (bytesRead = bis.read(data)) != -1) {
                try {
                    observer.stopCheck();
                } catch (IOException e) {
                    observer.downloadErrored(url, "Download interrupted");
                    return;
                }
                fos.write(data, 0, bytesRead);
                if (observer.useByteProgessBar()) {
                    bytesDownloaded += bytesRead;
                    observer.setBytesCompleted(bytesDownloaded);
                    observer.sendUpdate(STATUS.COMPLETED_BYTES, bytesDownloaded);
                }
            }
            bis.close();
            fos.close();
            break; // Download successful: break out of infinite loop
        } catch (HttpStatusException hse) {
            logger.debug("HTTP status exception", hse);
            logger.error("[!] HTTP status " + hse.getStatusCode() + " while downloading from " + urlToDownload);
            if (hse.getStatusCode() == 404 && Utils.getConfigBoolean("errors.skip404", false)) {
                observer.downloadErrored(url, "HTTP status code " + hse.getStatusCode() + " while downloading " + url.toExternalForm());
                return;
            }
        } catch (IOException e) {
            logger.debug("IOException", e);
            logger.error("[!] Exception while downloading file: " + url + " - " + e.getMessage());
        } finally {
            // Close any open streams
            try {
                if (bis != null) { bis.close(); }
            } catch (IOException e) { }
            try {
                if (fos != null) { fos.close(); }
            } catch (IOException e) { }
        }
        if (tries > this.retries) {
            logger.error("[!] Exceeded maximum retries (" + this.retries + ") for URL " + url);
            observer.downloadErrored(url, "Failed to download " + url.toExternalForm());
            return;
        }
    } while (true);
    observer.downloadCompleted(url, saveAs);
    logger.info("[+] Saved " + url + " as " + this.prettySaveAs);
}
|
#vulnerable code
// NOTE(review): flagged RESOURCE_LEAK (around the stream-handling section
// below — TODO confirm the exact leak path; early `return` statements inside
// the read loop do pass through the finally block that closes bis/fos).
// Also differs from the patched variant: the skip-existing check lacks the
// Utils.fuzzyExists(...) clause, so near-matching filenames are re-downloaded.
public void run() {
    long fileSize = 0;
    int bytesTotal = 0;
    int bytesDownloaded = 0;
    if (saveAs.exists() && observer.tryResumeDownload()) {
        fileSize = saveAs.length();
    }
    try {
        observer.stopCheck();
    } catch (IOException e) {
        observer.downloadErrored(url, "Download interrupted");
        return;
    }
    if (saveAs.exists() && !observer.tryResumeDownload()) {
        if (Utils.getConfigBoolean("file.overwrite", false)) {
            logger.info("[!] Deleting existing file" + prettySaveAs);
            saveAs.delete();
        } else {
            logger.info("[!] Skipping " + url + " -- file already exists: " + prettySaveAs);
            observer.downloadExists(url, saveAs);
            return;
        }
    }
    URL urlToDownload = this.url;
    boolean redirected = false;
    int tries = 0; // Number of attempts to download
    do {
        tries += 1;
        InputStream bis = null; OutputStream fos = null;
        try {
            logger.info("    Downloading file: " + urlToDownload + (tries > 0 ? " Retry #" + tries : ""));
            observer.sendUpdate(STATUS.DOWNLOAD_STARTED, url.toExternalForm());
            // Setup HTTP request
            HttpURLConnection huc;
            if (this.url.toString().startsWith("https")) {
                huc = (HttpsURLConnection) urlToDownload.openConnection();
            }
            else {
                huc = (HttpURLConnection) urlToDownload.openConnection();
            }
            huc.setInstanceFollowRedirects(true);
            huc.setConnectTimeout(TIMEOUT);
            huc.setRequestProperty("accept", "*/*");
            if (!referrer.equals("")) {
                huc.setRequestProperty("Referer", referrer); // Sic
            }
            huc.setRequestProperty("User-agent", AbstractRipper.USER_AGENT);
            String cookie = "";
            for (String key : cookies.keySet()) {
                if (!cookie.equals("")) {
                    cookie += "; ";
                }
                cookie += key + "=" + cookies.get(key);
            }
            huc.setRequestProperty("Cookie", cookie);
            if (observer.tryResumeDownload()) {
                if (fileSize != 0) {
                    huc.setRequestProperty("Range", "bytes=" + fileSize + "-");
                }
            }
            logger.debug("Request properties: " + huc.getRequestProperties());
            huc.connect();
            int statusCode = huc.getResponseCode();
            logger.debug("Status code: " + statusCode);
            if (statusCode != 206 && observer.tryResumeDownload() && saveAs.exists()) {
                // TODO find a better way to handle servers that don't support resuming downloads then just erroring out
                throw new IOException("Server doesn't support resuming downloads");
            }
            if (statusCode / 100 == 3) { // 3xx Redirect
                if (!redirected) {
                    // Don't increment retries on the first redirect
                    tries--;
                    redirected = true;
                }
                String location = huc.getHeaderField("Location");
                urlToDownload = new URL(location);
                // Throw exception so download can be retried
                throw new IOException("Redirect status code " + statusCode + " - redirect to " + location);
            }
            if (statusCode / 100 == 4) { // 4xx errors
                logger.error("[!] Non-retriable status code " + statusCode + " while downloading from " + url);
                observer.downloadErrored(url, "Non-retriable status code " + statusCode + " while downloading " + url.toExternalForm());
                return; // Not retriable, drop out.
            }
            if (statusCode / 100 == 5) { // 5xx errors
                observer.downloadErrored(url, "Retriable status code " + statusCode + " while downloading " + url.toExternalForm());
                // Throw exception so download can be retried
                throw new IOException("Retriable status code " + statusCode);
            }
            if (huc.getContentLength() == 503 && urlToDownload.getHost().endsWith("imgur.com")) {
                // Imgur image with 503 bytes is "404"
                logger.error("[!] Imgur image is 404 (503 bytes long): " + url);
                observer.downloadErrored(url, "Imgur image is 404: " + url.toExternalForm());
                return;
            }
            // If the ripper is using the bytes progress bar set bytesTotal to huc.getContentLength()
            if (observer.useByteProgessBar()) {
                bytesTotal = huc.getContentLength();
                observer.setBytesTotal(bytesTotal);
                observer.sendUpdate(STATUS.TOTAL_BYTES, bytesTotal);
                logger.debug("Size of file at " + this.url + " = " + bytesTotal + "b");
            }
            // Save file
            bis = new BufferedInputStream(huc.getInputStream());
            // Check if we should get the file ext from the MIME type
            if (getFileExtFromMIME) {
                String fileExt = URLConnection.guessContentTypeFromStream(bis);
                if (fileExt != null) {
                    fileExt = fileExt.replaceAll("image/", "");
                    saveAs = new File(saveAs.toString() + "." + fileExt);
                } else {
                    logger.error("Was unable to get content type from stream");
                    // Try to get the file type from the magic number
                    byte[] magicBytes = new byte[8];
                    bis.read(magicBytes,0, 5);
                    bis.reset();
                    fileExt = Utils.getEXTFromMagic(magicBytes);
                    if (fileExt != null) {
                        saveAs = new File(saveAs.toString() + "." + fileExt);
                    } else {
                        logger.error("Was unable to get content type using magic number");
                        logger.error("Magic number was: " + Arrays.toString(magicBytes));
                    }
                }
            }
            // If we're resuming a download we append data to the existing file
            if (statusCode == 206) {
                fos = new FileOutputStream(saveAs, true);
            } else {
                fos = new FileOutputStream(saveAs);
            }
            byte[] data = new byte[1024 * 256];
            int bytesRead;
            while ( (bytesRead = bis.read(data)) != -1) {
                try {
                    observer.stopCheck();
                } catch (IOException e) {
                    observer.downloadErrored(url, "Download interrupted");
                    return;
                }
                fos.write(data, 0, bytesRead);
                if (observer.useByteProgessBar()) {
                    bytesDownloaded += bytesRead;
                    observer.setBytesCompleted(bytesDownloaded);
                    observer.sendUpdate(STATUS.COMPLETED_BYTES, bytesDownloaded);
                }
            }
            bis.close();
            fos.close();
            break; // Download successful: break out of infinite loop
        } catch (HttpStatusException hse) {
            logger.debug("HTTP status exception", hse);
            logger.error("[!] HTTP status " + hse.getStatusCode() + " while downloading from " + urlToDownload);
            if (hse.getStatusCode() == 404 && Utils.getConfigBoolean("errors.skip404", false)) {
                observer.downloadErrored(url, "HTTP status code " + hse.getStatusCode() + " while downloading " + url.toExternalForm());
                return;
            }
        } catch (IOException e) {
            logger.debug("IOException", e);
            logger.error("[!] Exception while downloading file: " + url + " - " + e.getMessage());
        } finally {
            // Close any open streams
            try {
                if (bis != null) { bis.close(); }
            } catch (IOException e) { }
            try {
                if (fos != null) { fos.close(); }
            } catch (IOException e) { }
        }
        if (tries > this.retries) {
            logger.error("[!] Exceeded maximum retries (" + this.retries + ") for URL " + url);
            observer.downloadErrored(url, "Failed to download " + url.toExternalForm());
            return;
        }
    } while (true);
    observer.downloadCompleted(url, saveAs);
    logger.info("[+] Saved " + url + " as " + this.prettySaveAs);
}
#location 108
#vulnerability type RESOURCE_LEAK
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Forwards a status update to the registered observer (no-op when none is set).
// Unlike the vulnerable variant, it does not synchronize on the observer.
public void sendUpdate(STATUS status, Object message) {
    if (observer == null) {
        return;
    }
    observer.update(this, new RipStatusMessage(status, message));
}
|
#vulnerable code
// NOTE(review): flagged THREAD_SAFETY_VIOLATION — update()/notifyAll() run
// while holding the observer's monitor; the patched variant drops the
// synchronized block and the notifyAll() entirely.
public void sendUpdate(STATUS status, Object message) {
    if (observer == null) {
        return;
    }
    synchronized (observer) {
        observer.update(this, new RipStatusMessage(status, message));
        observer.notifyAll();
    }
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Records a per-URL download warning: moves the URL from the pending set to
// the errored map, notifies the observer, then checks whether the rip is done.
// No-op when no observer is registered.
public void downloadProblem(URL url, String message) {
    if (observer == null) {
        return;
    }
    itemsPending.remove(url);
    itemsErrored.put(url, message);
    observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " : " + message));
    checkIfComplete();
}
|
#vulnerable code
// NOTE(review): flagged THREAD_SAFETY_VIOLATION — mutates itemsPending /
// itemsErrored and calls update()/notifyAll() under the observer's monitor;
// the patched variant performs the same steps without synchronizing.
public void downloadProblem(URL url, String message) {
    if (observer == null) {
        return;
    }
    synchronized(observer) {
        itemsPending.remove(url);
        itemsErrored.put(url, message);
        observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " : " + message));
        observer.notifyAll();
    }
    checkIfComplete();
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Creates a reply to the status identified by `replyTo`.
// Rejects replies to anything that is not a STATUS or SHARE; a SHARE is first
// resolved to its original status. Throws ArchivedGroupException when the
// target belongs to an archived group. If the target is itself a reply, the
// new status is attached to the discussion's real original status instead.
public void replyToStatus(String content, String replyTo) throws ArchivedGroupException, ReplyStatusException {
    AbstractStatus abstractStatus = statusRepository.findStatusById(replyTo);
    if (abstractStatus != null &&
            !abstractStatus.getType().equals(StatusType.STATUS) &&
            !abstractStatus.getType().equals(StatusType.SHARE)) {
        log.debug("Can not reply to a status of this type");
        throw new ReplyStatusException();
    }
    if (abstractStatus != null &&
            abstractStatus.getType().equals(StatusType.SHARE)) {
        log.debug("Replacing the share by the original status");
        Share share = (Share) abstractStatus;
        AbstractStatus abstractRealStatus = statusRepository.findStatusById(share.getOriginalStatusId());
        abstractStatus = abstractRealStatus;
    }
    Status status = (Status) abstractStatus;
    Group group = null;
    if (status.getGroupId() != null) {
        group = groupService.getGroupById(status.getDomain(), status.getGroupId());
        // Archived-group check only applies when the status has a group.
        if (group.isArchivedGroup()) {
            throw new ArchivedGroupException();
        }
    }
    if (!status.getReplyTo().equals("")) {
        log.debug("Replacing the status by the status at the origin of the disucssion");
        // Original status is also a reply, replying to the real original status instead
        AbstractStatus abstractRealOriginalStatus = statusRepository.findStatusById(status.getDiscussionId());
        if (abstractRealOriginalStatus == null ||
                !abstractRealOriginalStatus.getType().equals(StatusType.STATUS)) {
            throw new ReplyStatusException();
        }
        Status realOriginalStatus = (Status) abstractRealOriginalStatus;
        Status replyStatus = createStatus(
                content,
                realOriginalStatus.getStatusPrivate(),
                group,
                realOriginalStatus.getStatusId(),
                status.getStatusId(),
                status.getUsername());
        discussionRepository.addReplyToDiscussion(realOriginalStatus.getStatusId(), replyStatus.getStatusId());
    } else {
        log.debug("Replying directly to the status at the origin of the disucssion");
        // The original status of the discussion is the one we reply to
        Status replyStatus =
                createStatus(content,
                        status.getStatusPrivate(),
                        group,
                        status.getStatusId(),
                        status.getStatusId(),
                        status.getUsername());
        discussionRepository.addReplyToDiscussion(status.getStatusId(), replyStatus.getStatusId());
    }
}
|
#vulnerable code
// NOTE(review): flagged NULL_DEREFERENCE — when findStatusById(replyTo)
// returns null, both type checks are skipped and originalStatus is a null
// cast, so originalStatus.getGroupId() below can NPE. The share-resolution
// result is also used without a null check.
public void replyToStatus(String content, String replyTo) throws ArchivedGroupException, ReplyStatusException {
    AbstractStatus abstractOriginalStatus = statusRepository.findStatusById(replyTo);
    if (abstractOriginalStatus != null &&
            !abstractOriginalStatus.getType().equals(StatusType.STATUS) &&
            !abstractOriginalStatus.getType().equals(StatusType.SHARE)) {
        log.debug("Can not reply to a status of this type");
        throw new ReplyStatusException();
    }
    if (abstractOriginalStatus != null &&
            abstractOriginalStatus.getType().equals(StatusType.SHARE)) {
        Share share = (Share) abstractOriginalStatus;
        AbstractStatus abstractRealOriginalStatus = statusRepository.findStatusById(share.getOriginalStatusId());
        abstractOriginalStatus = abstractRealOriginalStatus;
    }
    Status originalStatus = (Status) abstractOriginalStatus;
    Group group = null;
    if (originalStatus.getGroupId() != null) {
        group = groupService.getGroupById(originalStatus.getDomain(), originalStatus.getGroupId());
        if (group.isArchivedGroup()) {
            throw new ArchivedGroupException();
        }
    }
    if (!originalStatus.getReplyTo().equals("")) {
        // Original status is also a reply, replying to the real original status instead
        AbstractStatus abstractRealOriginalStatus = statusRepository.findStatusById(originalStatus.getDiscussionId());
        if (abstractRealOriginalStatus == null ||
                !abstractRealOriginalStatus.getType().equals(StatusType.STATUS)) {
            throw new ReplyStatusException();
        }
        Status realOriginalStatus = (Status) abstractRealOriginalStatus;
        Status replyStatus = createStatus(
                content,
                realOriginalStatus.getStatusPrivate(),
                group,
                realOriginalStatus.getStatusId(),
                originalStatus.getStatusId(),
                originalStatus.getUsername());
        discussionRepository.addReplyToDiscussion(realOriginalStatus.getStatusId(), replyStatus.getStatusId());
    } else {
        // The original status of the discussion is the one we reply to
        Status replyStatus =
                createStatus(content,
                        originalStatus.getStatusPrivate(),
                        group,
                        replyTo,
                        replyTo,
                        originalStatus.getUsername());
        discussionRepository.addReplyToDiscussion(originalStatus.getStatusId(), replyStatus.getStatusId());
    }
}
#location 20
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
@Cacheable("attachment-cache")
// Loads attachment metadata, then fetches the CONTENT column from the
// Cassandra ATTACHMENT_CF column family and attaches the raw bytes.
// Returns null when the id is null or no metadata exists for it.
public Attachment findAttachmentById(String attachmentId) {
    if (attachmentId == null) {
        return null;
    }
    if (log.isDebugEnabled()) {
        log.debug("Finding attachment : " + attachmentId);
    }
    Attachment attachment = this.findAttachmentMetadataById(attachmentId);
    // Guard against missing metadata before dereferencing the attachment.
    if (attachment == null) {
        return null;
    }
    ColumnQuery<String, String, byte[]> queryAttachment = HFactory.createColumnQuery(keyspaceOperator,
            StringSerializer.get(), StringSerializer.get(), BytesArraySerializer.get());
    HColumn<String, byte[]> columnAttachment =
            queryAttachment.setColumnFamily(ATTACHMENT_CF)
                    .setKey(attachmentId)
                    .setName(CONTENT)
                    .execute()
                    .get();
    attachment.setContent(columnAttachment.getValue());
    return attachment;
}
|
#vulnerable code
@Override
@Cacheable("attachment-cache")
// NOTE(review): flagged NULL_DEREFERENCE — findAttachmentMetadataById may
// return null (the patched variant null-checks it), so
// attachment.setContent(...) at the end can NPE.
public Attachment findAttachmentById(String attachmentId) {
    if (attachmentId == null) {
        return null;
    }
    if (log.isDebugEnabled()) {
        log.debug("Finding attachment : " + attachmentId);
    }
    Attachment attachment = this.findAttachmentMetadataById(attachmentId);
    ColumnQuery<String, String, byte[]> queryAttachment = HFactory.createColumnQuery(keyspaceOperator,
            StringSerializer.get(), StringSerializer.get(), BytesArraySerializer.get());
    HColumn<String, byte[]> columnAttachment =
            queryAttachment.setColumnFamily(ATTACHMENT_CF)
                    .setKey(attachmentId)
                    .setName(CONTENT)
                    .execute()
                    .get();
    attachment.setContent(columnAttachment.getValue());
    return attachment;
}
#location 22
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@RequestMapping(value = "/rest/statuses/{statusId}",
        method = RequestMethod.PATCH)
@ResponseBody
// PATCHes a status: toggles favorite when the requested flag differs from the
// current one, and triggers share/announce actions when requested. Returns
// the (possibly updated) DTO, or null when any step throws.
// NOTE(review): timelineService.getStatus may return null — status.isFavorite()
// would then NPE inside the try (swallowed by the broad catch); confirm callers.
public StatusDTO updateStatusV3(@RequestBody ActionStatus action, @PathVariable("statusId") String statusId) {
    try {
        StatusDTO status = timelineService.getStatus(statusId);
        if(action.isFavorite() != null && status.isFavorite() != action.isFavorite()){
            if(action.isFavorite()){
                timelineService.addFavoriteStatus(statusId);
            }
            else {
                timelineService.removeFavoriteStatus(statusId);
            }
            status.setFavorite(action.isFavorite());
        }
        if(action.isShared() != null && action.isShared()){
            timelineService.shareStatus(statusId);
        }
        if(action.isAnnounced() != null && action.isAnnounced()){
            timelineService.announceStatus(statusId);
        }
        return status;
    } catch (Exception e) {
        if (log.isDebugEnabled()) {
            e.printStackTrace();
        }
        return null;
    }
}
|
#vulnerable code
@RequestMapping(value = "/rest/statuses/{statusId}",
        method = RequestMethod.PATCH)
@ResponseBody
// NOTE(review): flagged NULL_DEREFERENCE — getStatus(statusId) may return
// null, so status.isFavorite() below can NPE (caught only by the broad
// catch). This variant also lacks the announce handling the patched one adds.
public StatusDTO updateStatusV3(@RequestBody ActionStatus action, @PathVariable("statusId") String statusId) {
    try {
        StatusDTO status = timelineService.getStatus(statusId);
        if(action.isFavorite() != null && status.isFavorite() != action.isFavorite()){
            if(action.isFavorite()){
                timelineService.addFavoriteStatus(statusId);
            }
            else {
                timelineService.removeFavoriteStatus(statusId);
            }
            status.setFavorite(action.isFavorite());
        }
        if(action.isShared() != null && action.isShared()){
            timelineService.shareStatus(statusId);
        }
        return status;
    } catch (Exception e) {
        if (log.isDebugEnabled()) {
            e.printStackTrace();
        }
        return null;
    }
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Creates a reply to status `replyTo`. The archived-group check is performed
// only when the original status actually has a group (group stays null
// otherwise). Replies to a reply are re-attached to the discussion's origin.
public void replyToStatus(String content, String replyTo) throws ArchivedGroupException {
    Status originalStatus = statusRepository.findStatusById(replyTo);
    Group group = null;
    if (originalStatus.getGroupId() != null) {
        group = groupService.getGroupById(originalStatus.getDomain(), originalStatus.getGroupId());
        // Check archived state inside the guard so a null group is never dereferenced.
        if (group.isArchivedGroup()) {
            throw new ArchivedGroupException();
        }
    }
    if (!originalStatus.getReplyTo().equals("")) {
        // Original status is also a reply, replying to the real original status instead
        Status realOriginalStatus = statusRepository.findStatusById(originalStatus.getDiscussionId());
        Status replyStatus = createStatus(
                content,
                realOriginalStatus.getStatusPrivate(),
                group,
                realOriginalStatus.getStatusId(),
                originalStatus.getStatusId(),
                originalStatus.getUsername());
        discussionRepository.addReplyToDiscussion(realOriginalStatus.getStatusId(), replyStatus.getStatusId());
    } else {
        // The original status of the discussion is the one we reply to
        Status replyStatus =
                createStatus(content,
                        originalStatus.getStatusPrivate(),
                        group,
                        replyTo,
                        replyTo,
                        originalStatus.getUsername());
        discussionRepository.addReplyToDiscussion(originalStatus.getStatusId(), replyStatus.getStatusId());
    }
}
|
#vulnerable code
// NOTE(review): flagged NULL_DEREFERENCE — group.isArchivedGroup() is called
// outside the getGroupId() guard, so when the status has no group, `group`
// is still null and this NPEs. The patched variant moves the check inside
// the guard.
public void replyToStatus(String content, String replyTo) throws ArchivedGroupException {
    Status originalStatus = statusRepository.findStatusById(replyTo);
    Group group = null;
    if (originalStatus.getGroupId() != null) {
        group = groupService.getGroupById(originalStatus.getDomain(), originalStatus.getGroupId());
    }
    if (group.isArchivedGroup()) {
        throw new ArchivedGroupException();
    }
    if (!originalStatus.getReplyTo().equals("")) {
        // Original status is also a reply, replying to the real original status instead
        Status realOriginalStatus = statusRepository.findStatusById(originalStatus.getDiscussionId());
        Status replyStatus = createStatus(
                content,
                realOriginalStatus.getStatusPrivate(),
                group,
                realOriginalStatus.getStatusId(),
                originalStatus.getStatusId(),
                originalStatus.getUsername());
        discussionRepository.addReplyToDiscussion(realOriginalStatus.getStatusId(), replyStatus.getStatusId());
    } else {
        // The original status of the discussion is the one we reply to
        Status replyStatus =
                createStatus(content,
                        originalStatus.getStatusPrivate(),
                        group,
                        replyTo,
                        replyTo,
                        originalStatus.getUsername());
        discussionRepository.addReplyToDiscussion(originalStatus.getStatusId(), replyStatus.getStatusId());
    }
}
#location 7
#vulnerability type NULL_DEREFERENCE
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Borrows an object for `key`: takes an idle instance, creates one, or (when
// blockWhenExhausted) waits up to borrowMaxWait ms (forever when negative).
// Activates and optionally validates the object before returning it,
// destroying and retrying on failure. Updates borrow/idle/wait statistics.
// Reads the exhausted-behavior config once via getBlockWhenExhausted() so it
// stays consistent for the whole call.
public T borrowObject(K key, long borrowMaxWait) throws Exception {
    assertOpen();
    PooledObject<T> p = null;
    // Get local copy of current config so it is consistent for entire
    // method execution
    boolean blockWhenExhausted = getBlockWhenExhausted();
    boolean create;
    long waitTime = 0;
    ObjectDeque<T> objectDeque = register(key);
    try {
        while (p == null) {
            create = false;
            if (blockWhenExhausted) {
                if (objectDeque != null) {
                    p = objectDeque.getIdleObjects().pollFirst();
                }
                if (p == null) {
                    create = true;
                    p = create(key);
                }
                if (p == null && objectDeque != null) {
                    if (borrowMaxWait < 0) {
                        p = objectDeque.getIdleObjects().takeFirst();
                    } else {
                        waitTime = System.currentTimeMillis();
                        p = objectDeque.getIdleObjects().pollFirst(
                                borrowMaxWait, TimeUnit.MILLISECONDS);
                        waitTime = System.currentTimeMillis() - waitTime;
                    }
                }
                if (p == null) {
                    throw new NoSuchElementException(
                            "Timeout waiting for idle object");
                }
                if (!p.allocate()) {
                    p = null;
                }
            } else {
                if (objectDeque != null) {
                    p = objectDeque.getIdleObjects().pollFirst();
                }
                if (p == null) {
                    create = true;
                    p = create(key);
                }
                if (p == null) {
                    throw new NoSuchElementException("Pool exhausted");
                }
                if (!p.allocate()) {
                    p = null;
                }
            }
            if (p != null) {
                try {
                    _factory.activateObject(key, p.getObject());
                } catch (Exception e) {
                    try {
                        destroy(key, p, true);
                    } catch (Exception e1) {
                        // Ignore - activation failure is more important
                    }
                    p = null;
                    if (create) {
                        NoSuchElementException nsee = new NoSuchElementException(
                                "Unable to activate object");
                        nsee.initCause(e);
                        throw nsee;
                    }
                }
                if (p != null && getTestOnBorrow()) {
                    boolean validate = false;
                    Throwable validationThrowable = null;
                    try {
                        validate = _factory.validateObject(key, p.getObject());
                    } catch (Throwable t) {
                        PoolUtils.checkRethrow(t);
                    }
                    if (!validate) {
                        try {
                            destroy(key, p, true);
                            destroyedByBorrowValidationCount.incrementAndGet();
                        } catch (Exception e) {
                            // Ignore - validation failure is more important
                        }
                        p = null;
                        if (create) {
                            NoSuchElementException nsee = new NoSuchElementException(
                                    "Unable to validate object");
                            nsee.initCause(validationThrowable);
                            throw nsee;
                        }
                    }
                }
            }
        }
    } finally {
        deregister(key);
    }
    borrowedCount.incrementAndGet();
    synchronized (idleTimes) {
        idleTimes.add(Long.valueOf(p.getIdleTimeMillis()));
        idleTimes.poll();
    }
    synchronized (waitTimes) {
        waitTimes.add(Long.valueOf(waitTime));
        waitTimes.poll();
    }
    synchronized (maxBorrowWaitTimeMillisLock) {
        if (waitTime > maxBorrowWaitTimeMillis) {
            maxBorrowWaitTimeMillis = waitTime;
        }
    }
    return p.getObject();
}
|
#vulnerable code
// NOTE(review): flagged THREAD_SAFETY_VIOLATION — this variant snapshots the
// config by reading the bare field `this.blockWhenExhausted`; the patched
// variant reads it through getBlockWhenExhausted() instead. Logic is
// otherwise identical to the fixed version.
public T borrowObject(K key, long borrowMaxWait) throws Exception {
    assertOpen();
    PooledObject<T> p = null;
    // Get local copy of current config so it is consistent for entire
    // method execution
    boolean blockWhenExhausted = this.blockWhenExhausted;
    boolean create;
    long waitTime = 0;
    ObjectDeque<T> objectDeque = register(key);
    try {
        while (p == null) {
            create = false;
            if (blockWhenExhausted) {
                if (objectDeque != null) {
                    p = objectDeque.getIdleObjects().pollFirst();
                }
                if (p == null) {
                    create = true;
                    p = create(key);
                }
                if (p == null && objectDeque != null) {
                    if (borrowMaxWait < 0) {
                        p = objectDeque.getIdleObjects().takeFirst();
                    } else {
                        waitTime = System.currentTimeMillis();
                        p = objectDeque.getIdleObjects().pollFirst(
                                borrowMaxWait, TimeUnit.MILLISECONDS);
                        waitTime = System.currentTimeMillis() - waitTime;
                    }
                }
                if (p == null) {
                    throw new NoSuchElementException(
                            "Timeout waiting for idle object");
                }
                if (!p.allocate()) {
                    p = null;
                }
            } else {
                if (objectDeque != null) {
                    p = objectDeque.getIdleObjects().pollFirst();
                }
                if (p == null) {
                    create = true;
                    p = create(key);
                }
                if (p == null) {
                    throw new NoSuchElementException("Pool exhausted");
                }
                if (!p.allocate()) {
                    p = null;
                }
            }
            if (p != null) {
                try {
                    _factory.activateObject(key, p.getObject());
                } catch (Exception e) {
                    try {
                        destroy(key, p, true);
                    } catch (Exception e1) {
                        // Ignore - activation failure is more important
                    }
                    p = null;
                    if (create) {
                        NoSuchElementException nsee = new NoSuchElementException(
                                "Unable to activate object");
                        nsee.initCause(e);
                        throw nsee;
                    }
                }
                if (p != null && getTestOnBorrow()) {
                    boolean validate = false;
                    Throwable validationThrowable = null;
                    try {
                        validate = _factory.validateObject(key, p.getObject());
                    } catch (Throwable t) {
                        PoolUtils.checkRethrow(t);
                    }
                    if (!validate) {
                        try {
                            destroy(key, p, true);
                            destroyedByBorrowValidationCount.incrementAndGet();
                        } catch (Exception e) {
                            // Ignore - validation failure is more important
                        }
                        p = null;
                        if (create) {
                            NoSuchElementException nsee = new NoSuchElementException(
                                    "Unable to validate object");
                            nsee.initCause(validationThrowable);
                            throw nsee;
                        }
                    }
                }
            }
        }
    } finally {
        deregister(key);
    }
    borrowedCount.incrementAndGet();
    synchronized (idleTimes) {
        idleTimes.add(Long.valueOf(p.getIdleTimeMillis()));
        idleTimes.poll();
    }
    synchronized (waitTimes) {
        waitTimes.add(Long.valueOf(waitTime));
        waitTimes.poll();
    }
    synchronized (maxBorrowWaitTimeMillisLock) {
        if (waitTime > maxBorrowWaitTimeMillis) {
            maxBorrowWaitTimeMillis = waitTime;
        }
    }
    return p.getObject();
}
#location 9
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
// Creates one object via the factory and adds it to the pool for `key`.
// If the pool was closed between creation and insertion, the object is
// destroyed (best-effort) and the IllegalStateException is rethrown.
// Factory calls are made WITHOUT holding the pool's monitor.
public void addObject(Object key) throws Exception {
    assertOpen();
    if (_factory == null) {
        throw new IllegalStateException("Cannot add objects without a factory.");
    }
    Object obj = _factory.makeObject(key);
    try {
        assertOpen();
        addObjectToPool(key, obj, false);
    } catch (IllegalStateException ex) { // Pool closed
        try {
            _factory.destroyObject(key, obj);
        } catch (Exception ex2) {
            // swallow
        }
        throw ex;
    }
}
|
#vulnerable code
public void addObject(Object key) throws Exception {
assertOpen();
if (_factory == null) {
throw new IllegalStateException("Cannot add objects without a factory.");
}
Object obj = _factory.makeObject(key);
synchronized (this) {
try {
assertOpen();
addObjectToPool(key, obj, false);
} catch (IllegalStateException ex) { // Pool closed
try {
_factory.destroyObject(key, obj);
} catch (Exception ex2) {
// swallow
}
throw ex;
}
}
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void clear(Object key) {
Map toDestroy = new HashMap();
final ObjectQueue pool;
synchronized (this) {
pool = (ObjectQueue)(_poolMap.remove(key));
if (pool == null) {
return;
} else {
_poolList.remove(key);
}
// Copy objects to new list so pool.queue can be cleared inside
// the sync
List objects = new ArrayList();
objects.addAll(pool.queue);
toDestroy.put(key, objects);
_totalIdle = _totalIdle - pool.queue.size();
_totalInternalProcessing =
_totalInternalProcessing + pool.queue.size();
pool.queue.clear();
}
destroy(toDestroy, _factory);
}
|
#vulnerable code
public void clear(Object key) {
Map toDestroy = new HashMap();
final ObjectQueue pool;
synchronized (this) {
pool = (ObjectQueue)(_poolMap.remove(key));
if (pool == null) {
return;
} else {
_poolList.remove(key);
}
// Copy objects to new list so pool.queue can be cleared inside
// the sync
List objects = new ArrayList();
objects.addAll(pool.queue);
toDestroy.put(key, objects);
_totalIdle = _totalIdle - pool.queue.size();
_totalInternalProcessing =
_totalInternalProcessing + pool.queue.size();
pool.queue.clear();
}
destroy(toDestroy);
}
#location 22
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public long getIdleTimeMillis() {
return System.currentTimeMillis() - lastReturnTime;
}
|
#vulnerable code
public long getIdleTimeMillis() {
return System.currentTimeMillis() - lastActiveTime;
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void clear() {
List toDestroy = new ArrayList();
synchronized(this) {
toDestroy.addAll(_pool);
_numInternalProcessing = _numInternalProcessing + _pool._size;
_pool.clear();
}
destroy(toDestroy, _factory);
}
|
#vulnerable code
public void clear() {
List toDestroy = new ArrayList();
synchronized(this) {
toDestroy.addAll(_pool);
_numInternalProcessing = _numInternalProcessing + _pool._size;
_pool.clear();
}
destroy(toDestroy);
}
#location 9
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void evict() throws Exception {
assertOpen();
if (_pool.size() == 0) {
return;
}
PooledObject<T> underTest = null;
for (int i = 0, m = getNumTests(); i < m; i++) {
if (_evictionIterator == null || !_evictionIterator.hasNext()) {
if (getLifo()) {
_evictionIterator = _pool.descendingIterator();
} else {
_evictionIterator = _pool.iterator();
}
}
if (!_evictionIterator.hasNext()) {
// Pool exhausted, nothing to do here
return;
} else {
try {
underTest = _evictionIterator.next();
} catch (NoSuchElementException nsee) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
_evictionIterator = null;
continue;
}
}
if (!underTest.startEvictionTest()) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
continue;
}
if (getMinEvictableIdleTimeMillis() > 0 &&
getMinEvictableIdleTimeMillis() <
underTest.getIdleTimeMillis() ||
(getSoftMinEvictableIdleTimeMillis() > 0 &&
getSoftMinEvictableIdleTimeMillis() <
underTest.getIdleTimeMillis() &&
getMinIdle() < _pool.size())) {
destroy(underTest);
} else {
if (getTestWhileIdle()) {
boolean active = false;
try {
_factory.activateObject(underTest.getObject());
active = true;
} catch(Exception e) {
destroy(underTest);
}
if(active) {
if(!_factory.validateObject(underTest.getObject())) {
destroy(underTest);
} else {
try {
_factory.passivateObject(underTest.getObject());
} catch(Exception e) {
destroy(underTest);
}
}
}
}
if (!underTest.endEvictionTest()) {
// TODO - May need to add code here once additional states
// are used
}
}
}
return;
}
|
#vulnerable code
public void evict() throws Exception {
assertOpen();
synchronized (this) {
if(_pool.isEmpty()) {
return;
}
if (null == _evictionCursor) {
_evictionCursor = (_pool.cursor(_lifo ? _pool.size() : 0));
}
}
for (int i=0,m=getNumTests();i<m;i++) {
final ObjectTimestampPair<T> pair;
synchronized (this) {
if ((_lifo && !_evictionCursor.hasPrevious()) ||
!_lifo && !_evictionCursor.hasNext()) {
_evictionCursor.close();
_evictionCursor = _pool.cursor(_lifo ? _pool.size() : 0);
}
pair = _lifo ?
_evictionCursor.previous() :
_evictionCursor.next();
_evictionCursor.remove();
_numInternalProcessing++;
}
boolean removeObject = false;
final long idleTimeMilis = System.currentTimeMillis() - pair.getTstamp();
if ((getMinEvictableIdleTimeMillis() > 0) &&
(idleTimeMilis > getMinEvictableIdleTimeMillis())) {
removeObject = true;
} else if ((getSoftMinEvictableIdleTimeMillis() > 0) &&
(idleTimeMilis > getSoftMinEvictableIdleTimeMillis()) &&
((getNumIdle() + 1)> getMinIdle())) { // +1 accounts for object we are processing
removeObject = true;
}
if(getTestWhileIdle() && !removeObject) {
boolean active = false;
try {
_factory.activateObject(pair.getValue());
active = true;
} catch(Exception e) {
removeObject=true;
}
if(active) {
if(!_factory.validateObject(pair.getValue())) {
removeObject=true;
} else {
try {
_factory.passivateObject(pair.getValue());
} catch(Exception e) {
removeObject=true;
}
}
}
}
if (removeObject) {
try {
_factory.destroyObject(pair.getValue());
} catch(Exception e) {
// ignored
}
}
synchronized (this) {
if(!removeObject) {
_evictionCursor.add(pair);
if (_lifo) {
// Skip over the element we just added back
_evictionCursor.previous();
}
}
_numInternalProcessing--;
}
}
allocate();
}
#location 12
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void evict() throws Exception {
Object key = null;
boolean testWhileIdle;
long minEvictableIdleTimeMillis;
synchronized (this) {
// Get local copy of current config. Can't sync when used later as
// it can result in a deadlock. Has the added advantage that config
// is consistent for entire method execution
testWhileIdle = _testWhileIdle;
minEvictableIdleTimeMillis = _minEvictableIdleTimeMillis;
// Initialize key to last key value
if (_evictionKeyCursor != null &&
_evictionKeyCursor._lastReturned != null) {
key = _evictionKeyCursor._lastReturned.value();
}
}
for (int i=0,m=getNumTests(); i<m; i++) {
final ObjectTimestampPair pair;
synchronized (this) {
// make sure pool map is not empty; otherwise do nothing
if (_poolMap == null || _poolMap.size() == 0) {
continue;
}
// if we don't have a key cursor, then create one
if (null == _evictionKeyCursor) {
resetEvictionKeyCursor();
key = null;
}
// if we don't have an object cursor, create one
if (null == _evictionCursor) {
// if the _evictionKeyCursor has a next value, use this key
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
} else {
// Reset the key cursor and try again
resetEvictionKeyCursor();
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
}
}
}
}
if (_evictionCursor == null) {
continue; // should never happen; do nothing
}
// If eviction cursor is exhausted, try to move
// to the next key and reset
if((_lifo && !_evictionCursor.hasPrevious()) ||
(!_lifo && !_evictionCursor.hasNext())) {
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
} else { // Need to reset Key cursor
resetEvictionKeyCursor();
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
}
}
}
}
}
if((_lifo && !_evictionCursor.hasPrevious()) ||
(!_lifo && !_evictionCursor.hasNext())) {
continue; // reset failed, do nothing
}
// if LIFO and the _evictionCursor has a previous object,
// or FIFO and _evictionCursor has a next object, test it
pair = _lifo ?
(ObjectTimestampPair) _evictionCursor.previous() :
(ObjectTimestampPair) _evictionCursor.next();
_evictionCursor.remove();
_totalIdle--;
_totalInternalProcessing++;
}
boolean removeObject=false;
if((minEvictableIdleTimeMillis > 0) &&
(System.currentTimeMillis() - pair.tstamp >
minEvictableIdleTimeMillis)) {
removeObject=true;
}
if(testWhileIdle && removeObject == false) {
boolean active = false;
try {
_factory.activateObject(key,pair.value);
active = true;
} catch(Exception e) {
removeObject=true;
}
if(active) {
if(!_factory.validateObject(key,pair.value)) {
removeObject=true;
} else {
try {
_factory.passivateObject(key,pair.value);
} catch(Exception e) {
removeObject=true;
}
}
}
}
if(removeObject) {
try {
_factory.destroyObject(key, pair.value);
} catch(Exception e) {
// ignored
} finally {
// Do not remove the key from the _poolList or _poolmap,
// even if the list stored in the _poolMap for this key is
// empty when minIdle > 0.
//
// Otherwise if it was the last object for that key,
// drop that pool
if (_minIdle == 0) {
synchronized (this) {
ObjectQueue objectQueue =
(ObjectQueue)_poolMap.get(key);
if (objectQueue != null &&
objectQueue.queue.isEmpty()) {
_poolMap.remove(key);
_poolList.remove(key);
}
}
}
}
}
synchronized (this) {
if(!removeObject) {
_evictionCursor.add(pair);
_totalIdle++;
if (_lifo) {
// Skip over the element we just added back
_evictionCursor.previous();
}
}
_totalInternalProcessing--;
}
}
}
|
#vulnerable code
public void evict() throws Exception {
// Initialize key to last key value
Object key = null;
synchronized (this) {
if (_evictionKeyCursor != null &&
_evictionKeyCursor._lastReturned != null) {
key = _evictionKeyCursor._lastReturned.value();
}
}
for (int i=0,m=getNumTests(); i<m; i++) {
final ObjectTimestampPair pair;
synchronized (this) {
// make sure pool map is not empty; otherwise do nothing
if (_poolMap == null || _poolMap.size() == 0) {
continue;
}
// if we don't have a key cursor, then create one
if (null == _evictionKeyCursor) {
resetEvictionKeyCursor();
key = null;
}
// if we don't have an object cursor, create one
if (null == _evictionCursor) {
// if the _evictionKeyCursor has a next value, use this key
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
} else {
// Reset the key cursor and try again
resetEvictionKeyCursor();
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
}
}
}
}
if (_evictionCursor == null) {
continue; // should never happen; do nothing
}
// If eviction cursor is exhausted, try to move
// to the next key and reset
if((_lifo && !_evictionCursor.hasPrevious()) ||
(!_lifo && !_evictionCursor.hasNext())) {
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
} else { // Need to reset Key cursor
resetEvictionKeyCursor();
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
}
}
}
}
}
if((_lifo && !_evictionCursor.hasPrevious()) ||
(!_lifo && !_evictionCursor.hasNext())) {
continue; // reset failed, do nothing
}
// if LIFO and the _evictionCursor has a previous object,
// or FIFO and _evictionCursor has a next object, test it
pair = _lifo ?
(ObjectTimestampPair) _evictionCursor.previous() :
(ObjectTimestampPair) _evictionCursor.next();
_evictionCursor.remove();
_totalIdle--;
_totalInternalProcessing++;
}
boolean removeObject=false;
if((_minEvictableIdleTimeMillis > 0) &&
(System.currentTimeMillis() - pair.tstamp >
_minEvictableIdleTimeMillis)) {
removeObject=true;
}
if(_testWhileIdle && removeObject == false) {
boolean active = false;
try {
_factory.activateObject(key,pair.value);
active = true;
} catch(Exception e) {
removeObject=true;
}
if(active) {
if(!_factory.validateObject(key,pair.value)) {
removeObject=true;
} else {
try {
_factory.passivateObject(key,pair.value);
} catch(Exception e) {
removeObject=true;
}
}
}
}
if(removeObject) {
try {
_factory.destroyObject(key, pair.value);
} catch(Exception e) {
// ignored
} finally {
// Do not remove the key from the _poolList or _poolmap,
// even if the list stored in the _poolMap for this key is
// empty when minIdle > 0.
//
// Otherwise if it was the last object for that key,
// drop that pool
if (_minIdle == 0) {
synchronized (this) {
ObjectQueue objectQueue =
(ObjectQueue)_poolMap.get(key);
if (objectQueue != null &&
objectQueue.queue.isEmpty()) {
_poolMap.remove(key);
_poolList.remove(key);
}
}
}
}
}
synchronized (this) {
if(!removeObject) {
_evictionCursor.add(pair);
_totalIdle++;
if (_lifo) {
// Skip over the element we just added back
_evictionCursor.previous();
}
}
_totalInternalProcessing--;
}
}
}
#location 84
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public void evict() throws Exception {
assertOpen();
if (getNumIdle() == 0) {
return;
}
synchronized (evictionLock) {
boolean testWhileIdle = getTestWhileIdle();
long idleEvictTime = Long.MAX_VALUE;
if (getMinEvictableIdleTimeMillis() > 0) {
idleEvictTime = getMinEvictableIdleTimeMillis();
}
PooledObject<T> underTest = null;
LinkedBlockingDeque<PooledObject<T>> idleObjects = null;
for (int i = 0, m = getNumTests(); i < m; i++) {
if(evictionIterator == null || !evictionIterator.hasNext()) {
if (evictionKeyIterator == null ||
!evictionKeyIterator.hasNext()) {
List<K> keyCopy = new ArrayList<K>();
keyCopy.addAll(poolKeyList);
evictionKeyIterator = keyCopy.iterator();
}
while (evictionKeyIterator.hasNext()) {
evictionKey = evictionKeyIterator.next();
ObjectDeque<T> objectDeque = poolMap.get(evictionKey);
if (objectDeque == null) {
continue;
}
idleObjects = objectDeque.getIdleObjects();
if (getLifo()) {
evictionIterator = idleObjects.descendingIterator();
} else {
evictionIterator = idleObjects.iterator();
}
if (evictionIterator.hasNext()) {
break;
}
evictionIterator = null;
}
}
if (evictionIterator == null) {
// Pools exhausted
return;
}
try {
underTest = evictionIterator.next();
} catch (NoSuchElementException nsee) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
evictionIterator = null;
continue;
}
if (!underTest.startEvictionTest()) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
continue;
}
if (idleEvictTime < underTest.getIdleTimeMillis()) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
} else {
if (testWhileIdle) {
boolean active = false;
try {
factory.activateObject(evictionKey,
underTest.getObject());
active = true;
} catch (Exception e) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
}
if (active) {
if (!factory.validateObject(evictionKey,
underTest.getObject())) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
} else {
try {
factory.passivateObject(evictionKey,
underTest.getObject());
} catch (Exception e) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
}
}
}
}
if (!underTest.endEvictionTest(idleObjects)) {
// TODO - May need to add code here once additional states
// are used
}
}
}
}
}
|
#vulnerable code
public void evict() throws Exception {
assertOpen();
if (getNumIdle() == 0) {
return;
}
boolean testWhileIdle = getTestWhileIdle();
long idleEvictTime = Long.MAX_VALUE;
if (getMinEvictableIdleTimeMillis() > 0) {
idleEvictTime = getMinEvictableIdleTimeMillis();
}
PooledObject<T> underTest = null;
LinkedBlockingDeque<PooledObject<T>> idleObjects = null;
for (int i = 0, m = getNumTests(); i < m; i++) {
if(evictionIterator == null || !evictionIterator.hasNext()) {
if (evictionKeyIterator == null ||
!evictionKeyIterator.hasNext()) {
List<K> keyCopy = new ArrayList<K>();
keyCopy.addAll(poolKeyList);
evictionKeyIterator = keyCopy.iterator();
}
while (evictionKeyIterator.hasNext()) {
evictionKey = evictionKeyIterator.next();
ObjectDeque<T> objectDeque = poolMap.get(evictionKey);
if (objectDeque == null) {
continue;
}
idleObjects = objectDeque.getIdleObjects();
if (getLifo()) {
evictionIterator = idleObjects.descendingIterator();
} else {
evictionIterator = idleObjects.iterator();
}
if (evictionIterator.hasNext()) {
break;
}
evictionIterator = null;
}
}
if (evictionIterator == null) {
// Pools exhausted
return;
}
try {
underTest = evictionIterator.next();
} catch (NoSuchElementException nsee) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
evictionIterator = null;
continue;
}
if (!underTest.startEvictionTest()) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
continue;
}
if (idleEvictTime < underTest.getIdleTimeMillis()) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
} else {
if (testWhileIdle) {
boolean active = false;
try {
factory.activateObject(evictionKey,
underTest.getObject());
active = true;
} catch (Exception e) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
}
if (active) {
if (!factory.validateObject(evictionKey,
underTest.getObject())) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
} else {
try {
factory.passivateObject(evictionKey,
underTest.getObject());
} catch (Exception e) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
}
}
}
}
if (!underTest.endEvictionTest(idleObjects)) {
// TODO - May need to add code here once additional states
// are used
}
}
}
}
#location 42
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public T borrowObject(K key, long borrowMaxWait) throws Exception {
assertOpen();
PooledObject<T> p = null;
// Get local copy of current config so it is consistent for entire
// method execution
boolean blockWhenExhausted = getBlockWhenExhausted();
boolean create;
long waitTime = 0;
ObjectDeque<T> objectDeque = register(key);
try {
while (p == null) {
create = false;
if (blockWhenExhausted) {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null && objectDeque != null) {
if (borrowMaxWait < 0) {
p = objectDeque.getIdleObjects().takeFirst();
} else {
waitTime = System.currentTimeMillis();
p = objectDeque.getIdleObjects().pollFirst(
borrowMaxWait, TimeUnit.MILLISECONDS);
waitTime = System.currentTimeMillis() - waitTime;
}
}
if (p == null) {
throw new NoSuchElementException(
"Timeout waiting for idle object");
}
if (!p.allocate()) {
p = null;
}
} else {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null) {
throw new NoSuchElementException("Pool exhausted");
}
if (!p.allocate()) {
p = null;
}
}
if (p != null) {
try {
factory.activateObject(key, p.getObject());
} catch (Exception e) {
try {
destroy(key, p, true);
} catch (Exception e1) {
// Ignore - activation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to activate object");
nsee.initCause(e);
throw nsee;
}
}
if (p != null && getTestOnBorrow()) {
boolean validate = false;
Throwable validationThrowable = null;
try {
validate = factory.validateObject(key, p.getObject());
} catch (Throwable t) {
PoolUtils.checkRethrow(t);
}
if (!validate) {
try {
destroy(key, p, true);
destroyedByBorrowValidationCount.incrementAndGet();
} catch (Exception e) {
// Ignore - validation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to validate object");
nsee.initCause(validationThrowable);
throw nsee;
}
}
}
}
}
} finally {
deregister(key);
}
updateStatsBorrow(p, waitTime);
return p.getObject();
}
|
#vulnerable code
public T borrowObject(K key, long borrowMaxWait) throws Exception {
assertOpen();
PooledObject<T> p = null;
// Get local copy of current config so it is consistent for entire
// method execution
boolean blockWhenExhausted = getBlockWhenExhausted();
boolean create;
long waitTime = 0;
ObjectDeque<T> objectDeque = register(key);
try {
while (p == null) {
create = false;
if (blockWhenExhausted) {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null && objectDeque != null) {
if (borrowMaxWait < 0) {
p = objectDeque.getIdleObjects().takeFirst();
} else {
waitTime = System.currentTimeMillis();
p = objectDeque.getIdleObjects().pollFirst(
borrowMaxWait, TimeUnit.MILLISECONDS);
waitTime = System.currentTimeMillis() - waitTime;
}
}
if (p == null) {
throw new NoSuchElementException(
"Timeout waiting for idle object");
}
if (!p.allocate()) {
p = null;
}
} else {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null) {
throw new NoSuchElementException("Pool exhausted");
}
if (!p.allocate()) {
p = null;
}
}
if (p != null) {
try {
factory.activateObject(key, p.getObject());
} catch (Exception e) {
try {
destroy(key, p, true);
} catch (Exception e1) {
// Ignore - activation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to activate object");
nsee.initCause(e);
throw nsee;
}
}
if (p != null && getTestOnBorrow()) {
boolean validate = false;
Throwable validationThrowable = null;
try {
validate = factory.validateObject(key, p.getObject());
} catch (Throwable t) {
PoolUtils.checkRethrow(t);
}
if (!validate) {
try {
destroy(key, p, true);
destroyedByBorrowValidationCount.incrementAndGet();
} catch (Exception e) {
// Ignore - validation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to validate object");
nsee.initCause(validationThrowable);
throw nsee;
}
}
}
}
}
} finally {
deregister(key);
}
borrowedCount.incrementAndGet();
synchronized (idleTimes) {
idleTimes.add(Long.valueOf(p.getIdleTimeMillis()));
idleTimes.poll();
}
synchronized (waitTimes) {
waitTimes.add(Long.valueOf(waitTime));
waitTimes.poll();
}
synchronized (maxBorrowWaitTimeMillisLock) {
if (waitTime > maxBorrowWaitTimeMillis) {
maxBorrowWaitTimeMillis = waitTime;
}
}
return p.getObject();
}
#location 23
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
@Override
public void printStackTrace(PrintWriter writer) {
Exception borrowedBy = this.borrowedBy;
if (borrowedBy != null) {
borrowedBy.printStackTrace(writer);
}
Exception usedBy = this.usedBy;
if (usedBy != null) {
usedBy.printStackTrace(writer);
}
}
|
#vulnerable code
@Override
public void printStackTrace(PrintWriter writer) {
if (borrowedBy != null) {
borrowedBy.printStackTrace(writer);
}
if (usedBy != null) {
usedBy.printStackTrace(writer);
}
}
#location 4
#vulnerability type THREAD_SAFETY_VIOLATION
|
Below is the vulnerable code, please generate the patch based on the following information.
|
#fixed code
public long getActiveTimeMillis() {
// Take copies to avoid threading issues
long rTime = lastReturnTime;
long bTime = lastBorrowTime;
if (rTime > bTime) {
return rTime - bTime;
} else {
return System.currentTimeMillis() - bTime;
}
}
|
#vulnerable code
public long getActiveTimeMillis() {
if (lastReturnTime > lastBorrowTime) {
return lastReturnTime - lastBorrowTime;
} else {
return System.currentTimeMillis() - lastBorrowTime;
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.