| id (string, 27-31 chars) | content (string, 14-287k chars) | max_stars_repo_path (string, 52-57 chars) |
|---|---|---|
crossvul-java_data_bad_4727_2
|
/**
* Copyright (c) 2000-2012 Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.liferay.portal.freemarker;
import com.liferay.portal.kernel.templateparser.TemplateContext;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.SetUtil;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.kernel.util.Validator;
import com.liferay.portal.model.Theme;
import com.liferay.portal.template.TemplateContextHelper;
import com.liferay.portal.template.TemplatePortletPreferences;
import com.liferay.portal.theme.ThemeDisplay;
import com.liferay.portal.util.PropsValues;
import com.liferay.portal.util.WebKeys;
import freemarker.ext.beans.BeansWrapper;
import freemarker.template.utility.ObjectConstructor;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
/**
* @author Mika Koivisto
* @author Raymond Augé
*/
public class FreeMarkerTemplateContextHelper extends TemplateContextHelper {
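// Dataset note (hedged reading of the CWE-264 path below): this "bad" variant exposes
// "objectUtil" (freemarker.template.utility.ObjectConstructor) and "staticUtil"
// (BeansWrapper static models) directly to templates, which lets a template author
// instantiate arbitrary classes and reach arbitrary static members; that is presumably
// why the file is filed under CWE-264 in the dataset.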
@Override
public Map<String, Object> getHelperUtilities() {
Map<String, Object> helperUtilities = super.getHelperUtilities();
// Enum util
helperUtilities.put(
"enumUtil", BeansWrapper.getDefaultInstance().getEnumModels());
// Object util
helperUtilities.put("objectUtil", new ObjectConstructor());
// Portlet preferences
helperUtilities.put(
"freeMarkerPortletPreferences", new TemplatePortletPreferences());
// Static class util
helperUtilities.put(
"staticUtil", BeansWrapper.getDefaultInstance().getStaticModels());
return helperUtilities;
}
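// The restricted variable names come from the journal template FreeMarker
// restricted-variables portal property, surfaced through PropsValues.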
@Override
public Set<String> getRestrictedVariables() {
return SetUtil.fromArray(
PropsValues.JOURNAL_TEMPLATE_FREEMARKER_RESTRICTED_VARIABLES);
}
@Override
public void prepare(
TemplateContext templateContext, HttpServletRequest request) {
super.prepare(templateContext, request);
// Theme display
ThemeDisplay themeDisplay = (ThemeDisplay)request.getAttribute(
WebKeys.THEME_DISPLAY);
if (themeDisplay != null) {
Theme theme = themeDisplay.getTheme();
// Full css and templates path
String servletContextName = GetterUtil.getString(
theme.getServletContextName());
templateContext.put(
"fullCssPath",
StringPool.SLASH + servletContextName +
theme.getFreeMarkerTemplateLoader() + theme.getCssPath());
templateContext.put(
"fullTemplatesPath",
StringPool.SLASH + servletContextName +
theme.getFreeMarkerTemplateLoader() +
theme.getTemplatesPath());
// Init
templateContext.put(
"init",
StringPool.SLASH + themeDisplay.getPathContext() +
FreeMarkerTemplateLoader.SERVLET_SEPARATOR +
"/html/themes/_unstyled/templates/init.ftl");
}
// Insert custom ftl variables
Map<String, Object> ftlVariables =
(Map<String, Object>)request.getAttribute(WebKeys.FTL_VARIABLES);
if (ftlVariables != null) {
for (Map.Entry<String, Object> entry : ftlVariables.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (Validator.isNotNull(key)) {
templateContext.put(key, value);
}
}
}
}
}
|
./CrossVul/dataset_final_sorted/CWE-264/java/bad_4727_2
|
crossvul-java_data_bad_3820_4
|
/**
* Copyright (c) 2009 - 2012 Red Hat, Inc.
*
* This software is licensed to you under the GNU General Public License,
* version 2 (GPLv2). There is NO WARRANTY for this software, express or
* implied, including the implied warranties of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
* along with this software; if not, see
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
*
* Red Hat trademarks are not licensed under GPLv2. No permission is
* granted to use or replicate Red Hat trademarks that are incorporated
* in this software or its documentation.
*/
package org.candlepin.sync;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Reader;
import java.net.URISyntaxException;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.candlepin.config.CandlepinCommonTestConfig;
import org.candlepin.config.Config;
import org.candlepin.config.ConfigProperties;
import org.candlepin.model.ExporterMetadata;
import org.candlepin.model.ExporterMetadataCurator;
import org.candlepin.model.Owner;
import org.candlepin.sync.Importer.ImportFile;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.xnap.commons.i18n.I18n;
import org.xnap.commons.i18n.I18nFactory;
/**
* ImporterTest
*/
public class ImporterTest {
private ObjectMapper mapper;
private I18n i18n;
private static final String MOCK_JS_PATH = "/tmp/empty.js";
private CandlepinCommonTestConfig config;
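// init() overwrites candlepin_info.properties on the test classpath with a fixed
// version (0.0.3, release 1) so the importer's version comparisons are deterministic;
// tearDown() restores the placeholder values.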
@Before
public void init() throws FileNotFoundException, URISyntaxException {
mapper = SyncUtils.getObjectMapper(new Config(new HashMap<String, String>()));
i18n = I18nFactory.getI18n(getClass(), Locale.US, I18nFactory.FALLBACK);
config = new CandlepinCommonTestConfig();
config.setProperty(ConfigProperties.SYNC_WORK_DIR, "/tmp");
PrintStream ps = new PrintStream(new File(this.getClass()
.getClassLoader().getResource("candlepin_info.properties").toURI()));
ps.println("version=0.0.3");
ps.println("release=1");
ps.close();
}
@Test
public void validateMetaJson() throws Exception {
/* read file
* read in version
* read in created date
* make sure created date is XYZ
* make sure version is > ABC
*/
Date now = new Date();
File file = createFile("/tmp/meta", "0.0.3", now,
"test_user", "prefix");
File actual = createFile("/tmp/meta.json", "0.0.3", now,
"test_user", "prefix");
ExporterMetadataCurator emc = mock(ExporterMetadataCurator.class);
ExporterMetadata em = new ExporterMetadata();
Date daybefore = getDateBeforeDays(1);
em.setExported(daybefore);
em.setId("42");
em.setType(ExporterMetadata.TYPE_SYSTEM);
when(emc.lookupByType(ExporterMetadata.TYPE_SYSTEM)).thenReturn(em);
Importer i = new Importer(null, null, null, null, null, null, null,
null, null, emc, null, null, i18n);
i.validateMetadata(ExporterMetadata.TYPE_SYSTEM, null, actual,
new ConflictOverrides());
Meta fileMeta = mapper.readValue(file, Meta.class);
Meta actualMeta = mapper.readValue(actual, Meta.class);
assertEquals(fileMeta.getPrincipalName(), actualMeta.getPrincipalName());
assertEquals(fileMeta.getCreated().getTime(), actualMeta.getCreated().getTime());
assertEquals(fileMeta.getWebAppPrefix(), actualMeta.getWebAppPrefix());
assertTrue(file.delete());
assertTrue(actual.delete());
assertTrue(daybefore.compareTo(em.getExported()) < 0);
}
@Test
public void firstRun() throws Exception {
File f = createFile("/tmp/meta", "0.0.3", new Date(),
"test_user", "prefix");
File actualmeta = createFile("/tmp/meta.json", "0.0.3", new Date(),
"test_user", "prefix");
ExporterMetadataCurator emc = mock(ExporterMetadataCurator.class);
when(emc.lookupByType(ExporterMetadata.TYPE_SYSTEM)).thenReturn(null);
Importer i = new Importer(null, null, null, null, null, null, null,
null, null, emc, null, null, i18n);
i.validateMetadata(ExporterMetadata.TYPE_SYSTEM, null, actualmeta,
new ConflictOverrides());
assertTrue(f.delete());
assertTrue(actualmeta.delete());
verify(emc).create(any(ExporterMetadata.class));
}
@Test
public void oldImport() throws Exception {
// actualmeta is the mock for the import itself
File actualmeta = createFile("/tmp/meta.json", "0.0.3", getDateBeforeDays(10),
"test_user", "prefix");
ExporterMetadataCurator emc = mock(ExporterMetadataCurator.class);
// em, returned by the emc mock, represents the last run (i.e., the most recent import in CP)
ExporterMetadata em = new ExporterMetadata();
em.setExported(getDateBeforeDays(3));
em.setId("42");
em.setType(ExporterMetadata.TYPE_SYSTEM);
when(emc.lookupByType(ExporterMetadata.TYPE_SYSTEM)).thenReturn(em);
Importer i = new Importer(null, null, null, null, null, null, null,
null, null, emc, null, null, i18n);
try {
i.validateMetadata(ExporterMetadata.TYPE_SYSTEM, null, actualmeta,
new ConflictOverrides());
fail();
}
catch (ImportConflictException e) {
assertFalse(e.message().getConflicts().isEmpty());
assertEquals(1, e.message().getConflicts().size());
assertTrue(e.message().getConflicts().contains(
Importer.Conflict.MANIFEST_OLD));
}
}
@Test
public void sameImport() throws Exception {
// actualmeta is the mock for the import itself
Date date = getDateBeforeDays(10);
File actualmeta = createFile("/tmp/meta.json", "0.0.3", date,
"test_user", "prefix");
ExporterMetadataCurator emc = mock(ExporterMetadataCurator.class);
// em, returned by the emc mock, represents the last run (i.e., the most recent import in CP)
ExporterMetadata em = new ExporterMetadata();
em.setExported(date); // exact same date = assumed same manifest
em.setId("42");
em.setType(ExporterMetadata.TYPE_SYSTEM);
when(emc.lookupByType(ExporterMetadata.TYPE_SYSTEM)).thenReturn(em);
Importer i = new Importer(null, null, null, null, null, null, null,
null, null, emc, null, null, i18n);
try {
i.validateMetadata(ExporterMetadata.TYPE_SYSTEM, null, actualmeta,
new ConflictOverrides());
fail();
}
catch (ImportConflictException e) {
assertFalse(e.message().getConflicts().isEmpty());
assertEquals(1, e.message().getConflicts().size());
assertTrue(e.message().getConflicts().contains(
Importer.Conflict.MANIFEST_SAME));
}
}
@Test
public void mergeConflicts() {
ImportConflictException e2 = new ImportConflictException("testing",
Importer.Conflict.DISTRIBUTOR_CONFLICT);
ImportConflictException e3 = new ImportConflictException("testing2",
Importer.Conflict.MANIFEST_OLD);
List<ImportConflictException> exceptions =
new LinkedList<ImportConflictException>();
exceptions.add(e2);
exceptions.add(e3);
ImportConflictException e1 = new ImportConflictException(exceptions);
assertEquals("testing\ntesting2", e1.message().getDisplayMessage());
assertEquals(2, e1.message().getConflicts().size());
assertTrue(e1.message().getConflicts().contains(
Importer.Conflict.DISTRIBUTOR_CONFLICT));
assertTrue(e1.message().getConflicts().contains(Importer.Conflict.MANIFEST_OLD));
}
@Test
public void newerImport() throws Exception {
// this tests bz #790751
Date importDate = getDateBeforeDays(10);
// actualmeta is the mock for the import itself
File actualmeta = createFile("/tmp/meta.json", "0.0.3", importDate,
"test_user", "prefix");
ExporterMetadataCurator emc = mock(ExporterMetadataCurator.class);
// em is the mock for lastrun (i.e., the most recent import in CP)
ExporterMetadata em = new ExporterMetadata();
em.setExported(getDateBeforeDays(30));
em.setId("42");
em.setType(ExporterMetadata.TYPE_SYSTEM);
when(emc.lookupByType(ExporterMetadata.TYPE_SYSTEM)).thenReturn(em);
Importer i = new Importer(null, null, null, null, null, null, null,
null, null, emc, null, null, i18n);
i.validateMetadata(ExporterMetadata.TYPE_SYSTEM, null, actualmeta,
new ConflictOverrides());
assertEquals(importDate, em.getExported());
}
@Test
public void newerVersionImport() throws Exception {
// if we are importing candlepin 0.0.10 data into candlepin 0.0.3,
// import the rules.
String version = "0.0.10";
File actualmeta = createFile("/tmp/meta.json", version, new Date(),
"test_user", "prefix");
File[] jsArray = createMockJsFile(MOCK_JS_PATH);
ExporterMetadataCurator emc = mock(ExporterMetadataCurator.class);
RulesImporter ri = mock(RulesImporter.class);
when(emc.lookupByType(ExporterMetadata.TYPE_SYSTEM)).thenReturn(null);
Importer i = new Importer(null, null, ri, null, null, null, null,
null, null, emc, null, null, i18n);
i.importRules(jsArray, actualmeta);
//verify that rules were imported
verify(ri).importObject(any(Reader.class), eq(version));
}
@Test
public void olderVersionImport() throws Exception {
// if we are importing candlepin 0.0.1 data into
// candlepin 0.0.3, do not import the rules
File actualmeta = createFile("/tmp/meta.json", "0.0.1", new Date(),
"test_user", "prefix");
ExporterMetadataCurator emc = mock(ExporterMetadataCurator.class);
RulesImporter ri = mock(RulesImporter.class);
when(emc.lookupByType(ExporterMetadata.TYPE_SYSTEM)).thenReturn(null);
Importer i = new Importer(null, null, ri, null, null, null, null,
null, null, emc, null, null, i18n);
i.validateMetadata(ExporterMetadata.TYPE_SYSTEM, null, actualmeta,
new ConflictOverrides());
//verify that rules were not imported
verify(ri, never()).importObject(any(Reader.class), any(String.class));
}
@Test(expected = ImporterException.class)
public void nullType() throws ImporterException, IOException {
File actualmeta = createFile("/tmp/meta.json", "0.0.3", new Date(),
"test_user", "prefix");
try {
Importer i = new Importer(null, null, null, null, null, null, null,
null, null, null, null, null, i18n);
// null Type should cause exception
i.validateMetadata(null, null, actualmeta, new ConflictOverrides());
}
finally {
assertTrue(actualmeta.delete());
}
}
@Test(expected = ImporterException.class)
public void expectOwner() throws ImporterException, IOException {
File actualmeta = createFile("/tmp/meta.json", "0.0.3", new Date(),
"test_user", "prefix");
ExporterMetadataCurator emc = mock(ExporterMetadataCurator.class);
when(emc.lookupByTypeAndOwner(ExporterMetadata.TYPE_PER_USER, null))
.thenReturn(null);
Importer i = new Importer(null, null, null, null, null, null, null,
null, null, emc, null, null, i18n);
// a missing owner for TYPE_PER_USER should cause an exception
i.validateMetadata(ExporterMetadata.TYPE_PER_USER, null, actualmeta,
new ConflictOverrides());
verify(emc, never()).create(any(ExporterMetadata.class));
}
@Test
public void testImportWithNonZipArchive()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
File archive = new File("/tmp/non_zip_file.zip");
FileWriter fw = new FileWriter(archive);
fw.write("Just a flat file");
fw.close();
try {
i.loadExport(owner, archive, co);
}
catch (ImportExtractionException e) {
assertEquals(e.getMessage(), i18n.tr("The archive {0} is " +
"not a properly compressed file or is empty", "non_zip_file.zip"));
return;
}
assertTrue(false);
}
@Test
public void testImportZipArchiveNoContent()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
File archive = new File("/tmp/file.zip");
ZipOutputStream out = new ZipOutputStream(new FileOutputStream(archive));
out.putNextEntry(new ZipEntry("This is just a zip file with no content"));
out.close();
try {
i.loadExport(owner, archive, co);
}
catch (ImportExtractionException e) {
assertEquals(e.getMessage(), i18n.tr("The archive does not " +
"contain the required signature file"));
return;
}
assertTrue(false);
}
@Test
public void testImportZipSigConsumerNotZip()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
File archive = new File("/tmp/file.zip");
ZipOutputStream out = new ZipOutputStream(new FileOutputStream(archive));
out.putNextEntry(new ZipEntry("signature"));
out.write("This is the placeholder for the signature file".getBytes());
File ceArchive = new File("/tmp/consumer_export.zip");
FileOutputStream fos = new FileOutputStream(ceArchive);
fos.write("This is just a flat file".getBytes());
fos.close();
addFileToArchive(out, ceArchive);
out.close();
try {
i.loadExport(owner, archive, co);
}
catch (ImportExtractionException e) {
assertEquals(e.getMessage(), i18n.tr("The archive {0} is " +
"not a properly compressed file or is empty", "consumer_export.zip"));
return;
}
assertTrue(false);
}
@Test
public void testImportZipSigAndEmptyConsumerZip()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
File archive = new File("/tmp/file.zip");
ZipOutputStream out = new ZipOutputStream(new FileOutputStream(archive));
out.putNextEntry(new ZipEntry("signature"));
out.write("This is the placeholder for the signature file".getBytes());
File ceArchive = new File("/tmp/consumer_export.zip");
ZipOutputStream cezip = new ZipOutputStream(new FileOutputStream(ceArchive));
cezip.putNextEntry(new ZipEntry("This is just a zip file with no content"));
cezip.close();
addFileToArchive(out, ceArchive);
out.close();
try {
i.loadExport(owner, archive, co);
}
catch (ImportExtractionException e) {
assertEquals(e.getMessage(), i18n.tr("The consumer_export " +
"archive has no contents"));
return;
}
assertTrue(false);
}
@Test
public void testImportNoMeta()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
Map<String, File> importFiles = new HashMap<String, File>();
File ruleDir = mock(File.class);
File[] rulesFiles = new File[]{mock(File.class)};
when(ruleDir.listFiles()).thenReturn(rulesFiles);
importFiles.put(ImportFile.META.fileName(), null);
importFiles.put(ImportFile.RULES.fileName(), ruleDir);
importFiles.put(ImportFile.CONSUMER_TYPE.fileName(), mock(File.class));
importFiles.put(ImportFile.CONSUMER.fileName(), mock(File.class));
importFiles.put(ImportFile.PRODUCTS.fileName(), mock(File.class));
importFiles.put(ImportFile.ENTITLEMENTS.fileName(), mock(File.class));
try {
i.importObjects(owner, importFiles, co);
}
catch (ImporterException e) {
assertEquals(e.getMessage(), i18n.tr("The archive does not contain the " +
"required meta.json file"));
return;
}
assertTrue(false);
}
@Test
public void testImportNoRulesDir()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
Map<String, File> importFiles = new HashMap<String, File>();
importFiles.put(ImportFile.META.fileName(), mock(File.class));
importFiles.put(ImportFile.RULES.fileName(), null);
importFiles.put(ImportFile.CONSUMER_TYPE.fileName(), mock(File.class));
importFiles.put(ImportFile.CONSUMER.fileName(), mock(File.class));
importFiles.put(ImportFile.PRODUCTS.fileName(), mock(File.class));
importFiles.put(ImportFile.ENTITLEMENTS.fileName(), mock(File.class));
try {
i.importObjects(owner, importFiles, co);
}
catch (ImporterException e) {
assertEquals(e.getMessage(), i18n.tr("The archive does not contain the " +
"required rules directory"));
return;
}
assertTrue(false);
}
@Test
public void testImportNoRulesFile()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
Map<String, File> importFiles = new HashMap<String, File>();
File ruleDir = mock(File.class);
when(ruleDir.listFiles()).thenReturn(new File[0]);
importFiles.put(ImportFile.META.fileName(), mock(File.class));
importFiles.put(ImportFile.RULES.fileName(), ruleDir);
importFiles.put(ImportFile.CONSUMER_TYPE.fileName(), mock(File.class));
importFiles.put(ImportFile.CONSUMER.fileName(), mock(File.class));
importFiles.put(ImportFile.PRODUCTS.fileName(), mock(File.class));
importFiles.put(ImportFile.ENTITLEMENTS.fileName(), mock(File.class));
try {
i.importObjects(owner, importFiles, co);
}
catch (ImporterException e) {
assertEquals(e.getMessage(), i18n.tr("The archive does not contain the " +
"required rules file(s)"));
return;
}
assertTrue(false);
}
@Test
public void testImportNoConsumerTypesDir()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
Map<String, File> importFiles = new HashMap<String, File>();
File ruleDir = mock(File.class);
File[] rulesFiles = new File[]{mock(File.class)};
when(ruleDir.listFiles()).thenReturn(rulesFiles);
importFiles.put(ImportFile.META.fileName(), mock(File.class));
importFiles.put(ImportFile.RULES.fileName(), ruleDir);
importFiles.put(ImportFile.CONSUMER_TYPE.fileName(), null);
importFiles.put(ImportFile.CONSUMER.fileName(), mock(File.class));
importFiles.put(ImportFile.PRODUCTS.fileName(), mock(File.class));
importFiles.put(ImportFile.ENTITLEMENTS.fileName(), mock(File.class));
try {
i.importObjects(owner, importFiles, co);
}
catch (ImporterException e) {
assertEquals(e.getMessage(), i18n.tr("The archive does not contain the " +
"required consumer_types directory"));
return;
}
assertTrue(false);
}
@Test
public void testImportNoConsumer()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
Map<String, File> importFiles = new HashMap<String, File>();
File ruleDir = mock(File.class);
File[] rulesFiles = new File[]{mock(File.class)};
when(ruleDir.listFiles()).thenReturn(rulesFiles);
importFiles.put(ImportFile.META.fileName(), mock(File.class));
importFiles.put(ImportFile.RULES.fileName(), ruleDir);
importFiles.put(ImportFile.CONSUMER_TYPE.fileName(), mock(File.class));
importFiles.put(ImportFile.CONSUMER.fileName(), null);
importFiles.put(ImportFile.PRODUCTS.fileName(), mock(File.class));
importFiles.put(ImportFile.ENTITLEMENTS.fileName(), mock(File.class));
try {
i.importObjects(owner, importFiles, co);
}
catch (ImporterException e) {
assertEquals(e.getMessage(), i18n.tr("The archive does not contain the " +
"required consumer.json file"));
return;
}
assertTrue(false);
}
@Test
public void testImportNoProductDir()
throws IOException, ImporterException {
RulesImporter ri = mock(RulesImporter.class);
Importer i = new Importer(null, null, ri, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
Map<String, File> importFiles = new HashMap<String, File>();
File ruleDir = mock(File.class);
File[] rulesFiles = createMockJsFile(MOCK_JS_PATH);
when(ruleDir.listFiles()).thenReturn(rulesFiles);
File actualmeta = createFile("/tmp/meta.json", "0.0.3", new Date(),
"test_user", "prefix");
// This is the hook to stop the test: we confirm that the archive component checks
// have passed and then jump out instead of trying to fake the actual file
// processing.
when(ri.importObject(any(Reader.class), any(String.class))).thenThrow(
new RuntimeException("Done with the test"));
importFiles.put(ImportFile.META.fileName(), actualmeta);
importFiles.put(ImportFile.RULES.fileName(), ruleDir);
importFiles.put(ImportFile.CONSUMER_TYPE.fileName(), mock(File.class));
importFiles.put(ImportFile.CONSUMER.fileName(), mock(File.class));
importFiles.put(ImportFile.PRODUCTS.fileName(), null);
importFiles.put(ImportFile.ENTITLEMENTS.fileName(), null);
try {
i.importObjects(owner, importFiles, co);
}
catch (RuntimeException e) {
assertEquals(e.getMessage(), "Done with the test");
return;
}
assertTrue(false);
}
@Test
public void testImportProductNoEntitlementDir()
throws IOException, ImporterException {
Importer i = new Importer(null, null, null, null, null, null, null,
null, config, null, null, null, i18n);
Owner owner = mock(Owner.class);
ConflictOverrides co = mock(ConflictOverrides.class);
Map<String, File> importFiles = new HashMap<String, File>();
File ruleDir = mock(File.class);
File[] rulesFiles = new File[]{mock(File.class)};
when(ruleDir.listFiles()).thenReturn(rulesFiles);
importFiles.put(ImportFile.META.fileName(), mock(File.class));
importFiles.put(ImportFile.RULES.fileName(), ruleDir);
importFiles.put(ImportFile.CONSUMER_TYPE.fileName(), mock(File.class));
importFiles.put(ImportFile.CONSUMER.fileName(), mock(File.class));
importFiles.put(ImportFile.PRODUCTS.fileName(), mock(File.class));
importFiles.put(ImportFile.ENTITLEMENTS.fileName(), null);
try {
i.importObjects(owner, importFiles, co);
}
catch (ImporterException e) {
assertEquals(e.getMessage(), i18n.tr("The archive does not contain the " +
"required entitlements directory"));
return;
}
assertTrue(false);
}
@After
public void tearDown() throws Exception {
PrintStream ps = new PrintStream(new File(this.getClass()
.getClassLoader().getResource("candlepin_info.properties").toURI()));
ps.println("version=${version}");
ps.println("release=${release}");
ps.close();
File mockJs = new File(MOCK_JS_PATH);
mockJs.delete();
}
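// Serializes a Meta(version, date, username, prefix) record to JSON at the given path.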
private File createFile(String filename, String version, Date date,
String username, String prefix)
throws JsonGenerationException, JsonMappingException, IOException {
File f = new File(filename);
Meta meta = new Meta(version, date, username, prefix);
mapper.writeValue(f, meta);
return f;
}
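// Creates a single placeholder rules .js file and returns it as a one-element array.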
private File[] createMockJsFile(String filename)
throws IOException {
FileWriter f = new FileWriter(filename);
f.write("// nothing to see here");
f.close();
File[] fileArray = new File[1];
fileArray[0] = new File(filename);
return fileArray;
}
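// Returns a Date the given number of days before "now".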
private Date getDateBeforeDays(int days) {
long daysinmillis = 24 * 60 * 60 * 1000;
long ms = System.currentTimeMillis() - (days * daysinmillis);
Date backDate = new Date();
backDate.setTime(ms);
return backDate;
}
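// Copies the given file into the zip stream as an entry named after the file.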
private void addFileToArchive(ZipOutputStream out, File file)
throws IOException, FileNotFoundException {
out.putNextEntry(new ZipEntry(file.getName()));
FileInputStream in = new FileInputStream(file);
byte [] buf = new byte[1024];
int len;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
out.closeEntry();
in.close();
}
}
|
./CrossVul/dataset_final_sorted/CWE-264/java/bad_3820_4
|
crossvul-java_data_bad_2293_2
|
404: Not Found
|
./CrossVul/dataset_final_sorted/CWE-264/java/bad_2293_2
|
crossvul-java_data_bad_2153_0
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.broker;
import java.io.EOFException;
import java.io.IOException;
import java.net.SocketException;
import java.net.URI;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.transaction.xa.XAResource;
import org.apache.activemq.advisory.AdvisorySupport;
import org.apache.activemq.broker.region.ConnectionStatistics;
import org.apache.activemq.broker.region.RegionBroker;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.BrokerInfo;
import org.apache.activemq.command.Command;
import org.apache.activemq.command.CommandTypes;
import org.apache.activemq.command.ConnectionControl;
import org.apache.activemq.command.ConnectionError;
import org.apache.activemq.command.ConnectionId;
import org.apache.activemq.command.ConnectionInfo;
import org.apache.activemq.command.ConsumerControl;
import org.apache.activemq.command.ConsumerId;
import org.apache.activemq.command.ConsumerInfo;
import org.apache.activemq.command.ControlCommand;
import org.apache.activemq.command.DataArrayResponse;
import org.apache.activemq.command.DestinationInfo;
import org.apache.activemq.command.ExceptionResponse;
import org.apache.activemq.command.FlushCommand;
import org.apache.activemq.command.IntegerResponse;
import org.apache.activemq.command.KeepAliveInfo;
import org.apache.activemq.command.Message;
import org.apache.activemq.command.MessageAck;
import org.apache.activemq.command.MessageDispatch;
import org.apache.activemq.command.MessageDispatchNotification;
import org.apache.activemq.command.MessagePull;
import org.apache.activemq.command.ProducerAck;
import org.apache.activemq.command.ProducerId;
import org.apache.activemq.command.ProducerInfo;
import org.apache.activemq.command.RemoveSubscriptionInfo;
import org.apache.activemq.command.Response;
import org.apache.activemq.command.SessionId;
import org.apache.activemq.command.SessionInfo;
import org.apache.activemq.command.ShutdownInfo;
import org.apache.activemq.command.TransactionId;
import org.apache.activemq.command.TransactionInfo;
import org.apache.activemq.command.WireFormatInfo;
import org.apache.activemq.network.DemandForwardingBridge;
import org.apache.activemq.network.MBeanNetworkListener;
import org.apache.activemq.network.NetworkBridgeConfiguration;
import org.apache.activemq.network.NetworkBridgeFactory;
import org.apache.activemq.security.MessageAuthorizationPolicy;
import org.apache.activemq.state.CommandVisitor;
import org.apache.activemq.state.ConnectionState;
import org.apache.activemq.state.ConsumerState;
import org.apache.activemq.state.ProducerState;
import org.apache.activemq.state.SessionState;
import org.apache.activemq.state.TransactionState;
import org.apache.activemq.thread.Task;
import org.apache.activemq.thread.TaskRunner;
import org.apache.activemq.thread.TaskRunnerFactory;
import org.apache.activemq.transaction.Transaction;
import org.apache.activemq.transport.DefaultTransportListener;
import org.apache.activemq.transport.ResponseCorrelator;
import org.apache.activemq.transport.TransmitCallback;
import org.apache.activemq.transport.Transport;
import org.apache.activemq.transport.TransportDisposedIOException;
import org.apache.activemq.util.IntrospectionSupport;
import org.apache.activemq.util.MarshallingSupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
public class TransportConnection implements Connection, Task, CommandVisitor {
private static final Logger LOG = LoggerFactory.getLogger(TransportConnection.class);
private static final Logger TRANSPORTLOG = LoggerFactory.getLogger(TransportConnection.class.getName() + ".Transport");
private static final Logger SERVICELOG = LoggerFactory.getLogger(TransportConnection.class.getName() + ".Service");
// Keeps track of the broker and connector that created this connection.
protected final Broker broker;
protected final TransportConnector connector;
// Keeps track of the state of the connections.
// protected final ConcurrentHashMap localConnectionStates=new
// ConcurrentHashMap();
protected final Map<ConnectionId, ConnectionState> brokerConnectionStates;
// The broker and wireformat info that was exchanged.
protected BrokerInfo brokerInfo;
protected final List<Command> dispatchQueue = new LinkedList<Command>();
protected TaskRunner taskRunner;
protected final AtomicReference<IOException> transportException = new AtomicReference<IOException>();
protected AtomicBoolean dispatchStopped = new AtomicBoolean(false);
private final Transport transport;
private MessageAuthorizationPolicy messageAuthorizationPolicy;
private WireFormatInfo wireFormatInfo;
// Used to do async dispatch.. this should perhaps be pushed down into the
// transport layer..
private boolean inServiceException;
private final ConnectionStatistics statistics = new ConnectionStatistics();
private boolean manageable;
private boolean slow;
private boolean markedCandidate;
private boolean blockedCandidate;
private boolean blocked;
private boolean connected;
private boolean active;
private boolean starting;
private boolean pendingStop;
private long timeStamp;
private final AtomicBoolean stopping = new AtomicBoolean(false);
private final CountDownLatch stopped = new CountDownLatch(1);
private final AtomicBoolean asyncException = new AtomicBoolean(false);
private final Map<ProducerId, ProducerBrokerExchange> producerExchanges = new HashMap<ProducerId, ProducerBrokerExchange>();
private final Map<ConsumerId, ConsumerBrokerExchange> consumerExchanges = new HashMap<ConsumerId, ConsumerBrokerExchange>();
private final CountDownLatch dispatchStoppedLatch = new CountDownLatch(1);
private ConnectionContext context;
private boolean networkConnection;
private boolean faultTolerantConnection;
private final AtomicInteger protocolVersion = new AtomicInteger(CommandTypes.PROTOCOL_VERSION);
private DemandForwardingBridge duplexBridge;
private final TaskRunnerFactory taskRunnerFactory;
private final TaskRunnerFactory stopTaskRunnerFactory;
private TransportConnectionStateRegister connectionStateRegister = new SingleTransportConnectionStateRegister();
private final ReentrantReadWriteLock serviceLock = new ReentrantReadWriteLock();
private String duplexNetworkConnectorId;
private Throwable stopError = null;
/**
* @param taskRunnerFactory - can be null if you want direct dispatch to the transport
* else commands are sent async.
* @param stopTaskRunnerFactory - can <b>not</b> be null, used for stopping this connection.
*/
public TransportConnection(TransportConnector connector, final Transport transport, Broker broker,
TaskRunnerFactory taskRunnerFactory, TaskRunnerFactory stopTaskRunnerFactory) {
this.connector = connector;
this.broker = broker;
RegionBroker rb = (RegionBroker) broker.getAdaptor(RegionBroker.class);
brokerConnectionStates = rb.getConnectionStates();
if (connector != null) {
this.statistics.setParent(connector.getStatistics());
this.messageAuthorizationPolicy = connector.getMessageAuthorizationPolicy();
}
this.taskRunnerFactory = taskRunnerFactory;
this.stopTaskRunnerFactory = stopTaskRunnerFactory;
this.transport = transport;
final BrokerService brokerService = this.broker.getBrokerService();
if( this.transport instanceof BrokerServiceAware ) {
((BrokerServiceAware)this.transport).setBrokerService(brokerService);
}
this.transport.setTransportListener(new DefaultTransportListener() {
@Override
public void onCommand(Object o) {
serviceLock.readLock().lock();
try {
if (!(o instanceof Command)) {
throw new RuntimeException("Protocol violation - Command corrupted: " + o.toString());
}
Command command = (Command) o;
if (!brokerService.isStopping()) {
Response response = service(command);
if (response != null && !brokerService.isStopping()) {
dispatchSync(response);
}
} else {
throw new BrokerStoppedException("Broker " + brokerService + " is being stopped");
}
} finally {
serviceLock.readLock().unlock();
}
}
@Override
public void onException(IOException exception) {
serviceLock.readLock().lock();
try {
serviceTransportException(exception);
} finally {
serviceLock.readLock().unlock();
}
}
});
connected = true;
}
/**
* Returns the number of messages to be dispatched to this connection
*
* @return size of dispatch queue
*/
@Override
public int getDispatchQueueSize() {
synchronized (dispatchQueue) {
return dispatchQueue.size();
}
}
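// Handles transport-level IO failures: optionally stops a master broker when a slave
// connection fails, records the exception, and shuts this connection down asynchronously.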
public void serviceTransportException(IOException e) {
BrokerService bService = connector.getBrokerService();
if (bService.isShutdownOnSlaveFailure()) {
if (brokerInfo != null) {
if (brokerInfo.isSlaveBroker()) {
LOG.error("Slave has exception: {} shutting down master now.", e.getMessage(), e);
try {
doStop();
bService.stop();
} catch (Exception ex) {
LOG.warn("Failed to stop the master", ex);
}
}
}
}
if (!stopping.get() && !pendingStop) {
transportException.set(e);
if (TRANSPORTLOG.isDebugEnabled()) {
TRANSPORTLOG.debug(this + " failed: " + e, e);
} else if (TRANSPORTLOG.isWarnEnabled() && !expected(e)) {
TRANSPORTLOG.warn(this + " failed: " + e);
}
stopAsync();
}
}
private boolean expected(IOException e) {
return isStomp() && ((e instanceof SocketException && e.getMessage().indexOf("reset") != -1) || e instanceof EOFException);
}
private boolean isStomp() {
URI uri = connector.getUri();
return uri != null && uri.getScheme() != null && uri.getScheme().indexOf("stomp") != -1;
}
/**
* Calls the serviceException method in an async thread. Since handling a
* service exception closes a socket, we should not tie up broker threads
* since client sockets may hang or cause deadlocks.
*/
@Override
public void serviceExceptionAsync(final IOException e) {
if (asyncException.compareAndSet(false, true)) {
new Thread("Async Exception Handler") {
@Override
public void run() {
serviceException(e);
}
}.start();
}
}
/**
* Closes a client's connection due to a detected error. Errors are ignored
* if the client or the broker is closing. Otherwise, the connection error is
* transmitted to the client before stopping its transport.
*/
@Override
public void serviceException(Throwable e) {
// are we a transport exception such as not being able to dispatch
// synchronously to a transport
if (e instanceof IOException) {
serviceTransportException((IOException) e);
} else if (e.getClass() == BrokerStoppedException.class) {
// Handle the case where the broker is stopped
// But the client is still connected.
if (!stopping.get()) {
SERVICELOG.debug("Broker has been stopped. Notifying client and closing his connection.");
ConnectionError ce = new ConnectionError();
ce.setException(e);
dispatchSync(ce);
// Record the error that caused the transport to stop
this.stopError = e;
// Wait a little bit to try to get the output buffer to flush
// the exception notification to the client.
try {
Thread.sleep(500);
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
}
// Worst case is we just kill the connection before the
// notification gets to the client.
stopAsync();
}
} else if (!stopping.get() && !inServiceException) {
inServiceException = true;
try {
SERVICELOG.warn("Async error occurred: ", e);
ConnectionError ce = new ConnectionError();
ce.setException(e);
if (pendingStop) {
dispatchSync(ce);
} else {
dispatchAsync(ce);
}
} finally {
inServiceException = false;
}
}
}
@Override
public Response service(Command command) {
MDC.put("activemq.connector", connector.getUri().toString());
Response response = null;
boolean responseRequired = command.isResponseRequired();
int commandId = command.getCommandId();
try {
if (!pendingStop) {
response = command.visit(this);
} else {
response = new ExceptionResponse(this.stopError);
}
} catch (Throwable e) {
if (SERVICELOG.isDebugEnabled() && e.getClass() != BrokerStoppedException.class) {
SERVICELOG.debug("Error occured while processing " + (responseRequired ? "sync" : "async")
+ " command: " + command + ", exception: " + e, e);
}
if (e instanceof SuppressReplyException || (e.getCause() instanceof SuppressReplyException)) {
LOG.info("Suppressing reply to: " + command + " on: " + e + ", cause: " + e.getCause());
responseRequired = false;
}
if (responseRequired) {
if (e instanceof SecurityException || e.getCause() instanceof SecurityException) {
SERVICELOG.warn("Security Error occurred: {}", e.getMessage());
}
response = new ExceptionResponse(e);
} else {
serviceException(e);
}
}
if (responseRequired) {
if (response == null) {
response = new Response();
}
response.setCorrelationId(commandId);
}
// The context may have been flagged so that the response is not
// sent.
if (context != null) {
if (context.isDontSendReponse()) {
context.setDontSendReponse(false);
response = null;
}
context = null;
}
MDC.remove("activemq.connector");
return response;
}
@Override
public Response processKeepAlive(KeepAliveInfo info) throws Exception {
return null;
}
@Override
public Response processRemoveSubscription(RemoveSubscriptionInfo info) throws Exception {
broker.removeSubscription(lookupConnectionState(info.getConnectionId()).getContext(), info);
return null;
}
@Override
public Response processWireFormat(WireFormatInfo info) throws Exception {
wireFormatInfo = info;
protocolVersion.set(info.getVersion());
return null;
}
@Override
public Response processShutdown(ShutdownInfo info) throws Exception {
stopAsync();
return null;
}
@Override
public Response processFlush(FlushCommand command) throws Exception {
return null;
}
@Override
public Response processBeginTransaction(TransactionInfo info) throws Exception {
TransportConnectionState cs = lookupConnectionState(info.getConnectionId());
context = null;
if (cs != null) {
context = cs.getContext();
}
if (cs == null) {
throw new NullPointerException("Context is null");
}
// Avoid replaying dup commands
if (cs.getTransactionState(info.getTransactionId()) == null) {
cs.addTransactionState(info.getTransactionId());
broker.beginTransaction(context, info.getTransactionId());
}
return null;
}
@Override
public int getActiveTransactionCount() {
int rc = 0;
for (TransportConnectionState cs : connectionStateRegister.listConnectionStates()) {
Collection<TransactionState> transactions = cs.getTransactionStates();
for (TransactionState transaction : transactions) {
rc++;
}
}
return rc;
}
@Override
public Long getOldestActiveTransactionDuration() {
TransactionState oldestTX = null;
for (TransportConnectionState cs : connectionStateRegister.listConnectionStates()) {
Collection<TransactionState> transactions = cs.getTransactionStates();
for (TransactionState transaction : transactions) {
if( oldestTX ==null || oldestTX.getCreatedAt() < transaction.getCreatedAt() ) {
oldestTX = transaction;
}
}
}
if( oldestTX == null ) {
return null;
}
return System.currentTimeMillis() - oldestTX.getCreatedAt();
}
@Override
public Response processEndTransaction(TransactionInfo info) throws Exception {
// No need to do anything. This packet is just sent by the client to
// make sure it is synced with the server, as the commit command could
// come from a different connection.
return null;
}
@Override
public Response processPrepareTransaction(TransactionInfo info) throws Exception {
TransportConnectionState cs = lookupConnectionState(info.getConnectionId());
context = null;
if (cs != null) {
context = cs.getContext();
}
if (cs == null) {
throw new NullPointerException("Context is null");
}
TransactionState transactionState = cs.getTransactionState(info.getTransactionId());
if (transactionState == null) {
throw new IllegalStateException("Cannot prepare a transaction that had not been started or previously returned XA_RDONLY: "
+ info.getTransactionId());
}
// Avoid dups.
if (!transactionState.isPrepared()) {
transactionState.setPrepared(true);
int result = broker.prepareTransaction(context, info.getTransactionId());
transactionState.setPreparedResult(result);
if (result == XAResource.XA_RDONLY) {
// we are done, no further rollback or commit from TM
cs.removeTransactionState(info.getTransactionId());
}
IntegerResponse response = new IntegerResponse(result);
return response;
} else {
IntegerResponse response = new IntegerResponse(transactionState.getPreparedResult());
return response;
}
}
@Override
public Response processCommitTransactionOnePhase(TransactionInfo info) throws Exception {
TransportConnectionState cs = lookupConnectionState(info.getConnectionId());
context = cs.getContext();
cs.removeTransactionState(info.getTransactionId());
broker.commitTransaction(context, info.getTransactionId(), true);
return null;
}
@Override
public Response processCommitTransactionTwoPhase(TransactionInfo info) throws Exception {
TransportConnectionState cs = lookupConnectionState(info.getConnectionId());
context = cs.getContext();
cs.removeTransactionState(info.getTransactionId());
broker.commitTransaction(context, info.getTransactionId(), false);
return null;
}
@Override
public Response processRollbackTransaction(TransactionInfo info) throws Exception {
TransportConnectionState cs = lookupConnectionState(info.getConnectionId());
context = cs.getContext();
cs.removeTransactionState(info.getTransactionId());
broker.rollbackTransaction(context, info.getTransactionId());
return null;
}
@Override
public Response processForgetTransaction(TransactionInfo info) throws Exception {
TransportConnectionState cs = lookupConnectionState(info.getConnectionId());
context = cs.getContext();
broker.forgetTransaction(context, info.getTransactionId());
return null;
}
@Override
public Response processRecoverTransactions(TransactionInfo info) throws Exception {
TransportConnectionState cs = lookupConnectionState(info.getConnectionId());
context = cs.getContext();
TransactionId[] preparedTransactions = broker.getPreparedTransactions(context);
return new DataArrayResponse(preparedTransactions);
}
@Override
public Response processMessage(Message messageSend) throws Exception {
ProducerId producerId = messageSend.getProducerId();
ProducerBrokerExchange producerExchange = getProducerBrokerExchange(producerId);
if (producerExchange.canDispatch(messageSend)) {
broker.send(producerExchange, messageSend);
}
return null;
}
@Override
public Response processMessageAck(MessageAck ack) throws Exception {
ConsumerBrokerExchange consumerExchange = getConsumerBrokerExchange(ack.getConsumerId());
if (consumerExchange != null) {
broker.acknowledge(consumerExchange, ack);
} else if (ack.isInTransaction()) {
LOG.warn("no matching consumer, ignoring ack {}", consumerExchange, ack);
}
return null;
}
@Override
public Response processMessagePull(MessagePull pull) throws Exception {
return broker.messagePull(lookupConnectionState(pull.getConsumerId()).getContext(), pull);
}
@Override
public Response processMessageDispatchNotification(MessageDispatchNotification notification) throws Exception {
broker.processDispatchNotification(notification);
return null;
}
@Override
public Response processAddDestination(DestinationInfo info) throws Exception {
TransportConnectionState cs = lookupConnectionState(info.getConnectionId());
broker.addDestinationInfo(cs.getContext(), info);
if (info.getDestination().isTemporary()) {
cs.addTempDestination(info);
}
return null;
}
@Override
public Response processRemoveDestination(DestinationInfo info) throws Exception {
TransportConnectionState cs = lookupConnectionState(info.getConnectionId());
broker.removeDestinationInfo(cs.getContext(), info);
if (info.getDestination().isTemporary()) {
cs.removeTempDestination(info.getDestination());
}
return null;
}
@Override
public Response processAddProducer(ProducerInfo info) throws Exception {
SessionId sessionId = info.getProducerId().getParentId();
ConnectionId connectionId = sessionId.getParentId();
TransportConnectionState cs = lookupConnectionState(connectionId);
if (cs == null) {
throw new IllegalStateException("Cannot add a producer to a connection that had not been registered: "
+ connectionId);
}
SessionState ss = cs.getSessionState(sessionId);
if (ss == null) {
throw new IllegalStateException("Cannot add a producer to a session that had not been registered: "
+ sessionId);
}
// Avoid replaying dup commands
if (!ss.getProducerIds().contains(info.getProducerId())) {
ActiveMQDestination destination = info.getDestination();
if (destination != null && !AdvisorySupport.isAdvisoryTopic(destination)) {
if (getProducerCount(connectionId) >= connector.getMaximumProducersAllowedPerConnection()){
throw new IllegalStateException("Can't add producer on connection " + connectionId + ": at maximum limit: " + connector.getMaximumProducersAllowedPerConnection());
}
}
broker.addProducer(cs.getContext(), info);
try {
ss.addProducer(info);
} catch (IllegalStateException e) {
broker.removeProducer(cs.getContext(), info);
}
}
return null;
}
@Override
public Response processRemoveProducer(ProducerId id) throws Exception {
SessionId sessionId = id.getParentId();
ConnectionId connectionId = sessionId.getParentId();
TransportConnectionState cs = lookupConnectionState(connectionId);
SessionState ss = cs.getSessionState(sessionId);
if (ss == null) {
throw new IllegalStateException("Cannot remove a producer from a session that had not been registered: "
+ sessionId);
}
ProducerState ps = ss.removeProducer(id);
if (ps == null) {
throw new IllegalStateException("Cannot remove a producer that had not been registered: " + id);
}
removeProducerBrokerExchange(id);
broker.removeProducer(cs.getContext(), ps.getInfo());
return null;
}
@Override
public Response processAddConsumer(ConsumerInfo info) throws Exception {
SessionId sessionId = info.getConsumerId().getParentId();
ConnectionId connectionId = sessionId.getParentId();
TransportConnectionState cs = lookupConnectionState(connectionId);
if (cs == null) {
throw new IllegalStateException("Cannot add a consumer to a connection that had not been registered: "
+ connectionId);
}
SessionState ss = cs.getSessionState(sessionId);
if (ss == null) {
throw new IllegalStateException(broker.getBrokerName()
+ " Cannot add a consumer to a session that had not been registered: " + sessionId);
}
// Avoid replaying dup commands
if (!ss.getConsumerIds().contains(info.getConsumerId())) {
ActiveMQDestination destination = info.getDestination();
if (destination != null && !AdvisorySupport.isAdvisoryTopic(destination)) {
if (getConsumerCount(connectionId) >= connector.getMaximumConsumersAllowedPerConnection()){
throw new IllegalStateException("Can't add consumer on connection " + connectionId + ": at maximum limit: " + connector.getMaximumConsumersAllowedPerConnection());
}
}
broker.addConsumer(cs.getContext(), info);
try {
ss.addConsumer(info);
addConsumerBrokerExchange(info.getConsumerId());
} catch (IllegalStateException e) {
broker.removeConsumer(cs.getContext(), info);
}
}
return null;
}
@Override
public Response processRemoveConsumer(ConsumerId id, long lastDeliveredSequenceId) throws Exception {
SessionId sessionId = id.getParentId();
ConnectionId connectionId = sessionId.getParentId();
TransportConnectionState cs = lookupConnectionState(connectionId);
if (cs == null) {
throw new IllegalStateException("Cannot remove a consumer from a connection that had not been registered: "
+ connectionId);
}
SessionState ss = cs.getSessionState(sessionId);
if (ss == null) {
throw new IllegalStateException("Cannot remove a consumer from a session that had not been registered: "
+ sessionId);
}
ConsumerState consumerState = ss.removeConsumer(id);
if (consumerState == null) {
throw new IllegalStateException("Cannot remove a consumer that had not been registered: " + id);
}
ConsumerInfo info = consumerState.getInfo();
info.setLastDeliveredSequenceId(lastDeliveredSequenceId);
broker.removeConsumer(cs.getContext(), consumerState.getInfo());
removeConsumerBrokerExchange(id);
return null;
}
@Override
public Response processAddSession(SessionInfo info) throws Exception {
ConnectionId connectionId = info.getSessionId().getParentId();
TransportConnectionState cs = lookupConnectionState(connectionId);
// Avoid replaying dup commands
if (cs != null && !cs.getSessionIds().contains(info.getSessionId())) {
broker.addSession(cs.getContext(), info);
try {
cs.addSession(info);
} catch (IllegalStateException e) {
e.printStackTrace();
broker.removeSession(cs.getContext(), info);
}
}
return null;
}
@Override
public Response processRemoveSession(SessionId id, long lastDeliveredSequenceId) throws Exception {
ConnectionId connectionId = id.getParentId();
TransportConnectionState cs = lookupConnectionState(connectionId);
if (cs == null) {
throw new IllegalStateException("Cannot remove session from connection that had not been registered: " + connectionId);
}
SessionState session = cs.getSessionState(id);
if (session == null) {
throw new IllegalStateException("Cannot remove session that had not been registered: " + id);
}
// Don't let new consumers or producers get added while we are closing
// this down.
session.shutdown();
// Cascade the connection stop to the consumers and producers.
for (ConsumerId consumerId : session.getConsumerIds()) {
try {
processRemoveConsumer(consumerId, lastDeliveredSequenceId);
} catch (Throwable e) {
LOG.warn("Failed to remove consumer: {}", consumerId, e);
}
}
for (ProducerId producerId : session.getProducerIds()) {
try {
processRemoveProducer(producerId);
} catch (Throwable e) {
LOG.warn("Failed to remove producer: {}", producerId, e);
}
}
cs.removeSession(id);
broker.removeSession(cs.getContext(), session.getInfo());
return null;
}
@Override
public Response processAddConnection(ConnectionInfo info) throws Exception {
// Older clients should have been defaulting this field to true.. but
// they were not.
if (wireFormatInfo != null && wireFormatInfo.getVersion() <= 2) {
info.setClientMaster(true);
}
TransportConnectionState state;
// Make sure 2 concurrent connections by the same ID only generate 1
// TransportConnectionState object.
synchronized (brokerConnectionStates) {
state = (TransportConnectionState) brokerConnectionStates.get(info.getConnectionId());
if (state == null) {
state = new TransportConnectionState(info, this);
brokerConnectionStates.put(info.getConnectionId(), state);
}
state.incrementReference();
}
// If there are 2 concurrent connections for the same connection id,
// then the last one in wins; we need to sync here
// to figure out the winner.
synchronized (state.getConnectionMutex()) {
if (state.getConnection() != this) {
LOG.debug("Killing previous stale connection: {}", state.getConnection().getRemoteAddress());
state.getConnection().stop();
LOG.debug("Connection {} taking over previous connection: {}", getRemoteAddress(), state.getConnection().getRemoteAddress());
state.setConnection(this);
state.reset(info);
}
}
registerConnectionState(info.getConnectionId(), state);
LOG.debug("Setting up new connection id: {}, address: {}, info: {}", new Object[]{ info.getConnectionId(), getRemoteAddress(), info });
this.faultTolerantConnection = info.isFaultTolerant();
// Setup the context.
String clientId = info.getClientId();
context = new ConnectionContext();
context.setBroker(broker);
context.setClientId(clientId);
context.setClientMaster(info.isClientMaster());
context.setConnection(this);
context.setConnectionId(info.getConnectionId());
context.setConnector(connector);
context.setMessageAuthorizationPolicy(getMessageAuthorizationPolicy());
context.setNetworkConnection(networkConnection);
context.setFaultTolerant(faultTolerantConnection);
context.setTransactions(new ConcurrentHashMap<TransactionId, Transaction>());
context.setUserName(info.getUserName());
context.setWireFormatInfo(wireFormatInfo);
context.setReconnect(info.isFailoverReconnect());
this.manageable = info.isManageable();
context.setConnectionState(state);
state.setContext(context);
state.setConnection(this);
if (info.getClientIp() == null) {
info.setClientIp(getRemoteAddress());
}
try {
broker.addConnection(context, info);
} catch (Exception e) {
synchronized (brokerConnectionStates) {
brokerConnectionStates.remove(info.getConnectionId());
}
unregisterConnectionState(info.getConnectionId());
LOG.warn("Failed to add Connection {}", info.getConnectionId(), e);
if (e instanceof SecurityException) {
// close this down - in case the peer of this transport doesn't play nice
delayedStop(2000, "Failed with SecurityException: " + e.getLocalizedMessage(), e);
}
throw e;
}
if (info.isManageable()) {
// send ConnectionCommand
ConnectionControl command = this.connector.getConnectionControl();
command.setFaultTolerant(broker.isFaultTolerantConfiguration());
if (info.isFailoverReconnect()) {
command.setRebalanceConnection(false);
}
dispatchAsync(command);
}
return null;
}
@Override
public synchronized Response processRemoveConnection(ConnectionId id, long lastDeliveredSequenceId)
throws InterruptedException {
LOG.debug("remove connection id: {}", id);
TransportConnectionState cs = lookupConnectionState(id);
if (cs != null) {
// Don't allow things to be added to the connection state while we
// are shutting down.
cs.shutdown();
// Cascade the connection stop to the sessions.
for (SessionId sessionId : cs.getSessionIds()) {
try {
processRemoveSession(sessionId, lastDeliveredSequenceId);
} catch (Throwable e) {
SERVICELOG.warn("Failed to remove session {}", sessionId, e);
}
}
// Cascade the connection stop to temp destinations.
for (Iterator<DestinationInfo> iter = cs.getTempDestinations().iterator(); iter.hasNext(); ) {
DestinationInfo di = iter.next();
try {
broker.removeDestination(cs.getContext(), di.getDestination(), 0);
} catch (Throwable e) {
SERVICELOG.warn("Failed to remove tmp destination {}", di.getDestination(), e);
}
iter.remove();
}
try {
broker.removeConnection(cs.getContext(), cs.getInfo(), null);
} catch (Throwable e) {
SERVICELOG.warn("Failed to remove connection {}", cs.getInfo(), e);
}
TransportConnectionState state = unregisterConnectionState(id);
if (state != null) {
synchronized (brokerConnectionStates) {
// If we are the last reference, we should remove the state
// from the broker.
if (state.decrementReference() == 0) {
brokerConnectionStates.remove(id);
}
}
}
}
return null;
}
@Override
public Response processProducerAck(ProducerAck ack) throws Exception {
// A broker should not get ProducerAck messages.
return null;
}
@Override
public Connector getConnector() {
return connector;
}
@Override
public void dispatchSync(Command message) {
try {
processDispatch(message);
} catch (IOException e) {
serviceExceptionAsync(e);
}
}
@Override
public void dispatchAsync(Command message) {
if (!stopping.get()) {
if (taskRunner == null) {
dispatchSync(message);
} else {
synchronized (dispatchQueue) {
dispatchQueue.add(message);
}
try {
taskRunner.wakeup();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
} else {
if (message.isMessageDispatch()) {
MessageDispatch md = (MessageDispatch) message;
TransmitCallback sub = md.getTransmitCallback();
broker.postProcessDispatch(md);
if (sub != null) {
sub.onFailure();
}
}
}
}
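// Message dispatches receive broker pre/post processing and have their transmit callback fired on success or failure.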
protected void processDispatch(Command command) throws IOException {
MessageDispatch messageDispatch = (MessageDispatch) (command.isMessageDispatch() ? command : null);
try {
if (!stopping.get()) {
if (messageDispatch != null) {
broker.preProcessDispatch(messageDispatch);
}
dispatch(command);
}
} catch (IOException e) {
if (messageDispatch != null) {
TransmitCallback sub = messageDispatch.getTransmitCallback();
broker.postProcessDispatch(messageDispatch);
if (sub != null) {
sub.onFailure();
}
messageDispatch = null;
throw e;
}
} finally {
if (messageDispatch != null) {
TransmitCallback sub = messageDispatch.getTransmitCallback();
broker.postProcessDispatch(messageDispatch);
if (sub != null) {
sub.onSuccess();
}
}
}
}
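// Invoked repeatedly by the task runner; processes one queued command per call and returns true to request another iteration.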
@Override
public boolean iterate() {
try {
if (pendingStop || stopping.get()) {
if (dispatchStopped.compareAndSet(false, true)) {
if (transportException.get() == null) {
try {
dispatch(new ShutdownInfo());
} catch (Throwable ignore) {
}
}
dispatchStoppedLatch.countDown();
}
return false;
}
if (!dispatchStopped.get()) {
Command command = null;
synchronized (dispatchQueue) {
if (dispatchQueue.isEmpty()) {
return false;
}
command = dispatchQueue.remove(0);
}
processDispatch(command);
return true;
}
return false;
} catch (IOException e) {
if (dispatchStopped.compareAndSet(false, true)) {
dispatchStoppedLatch.countDown();
}
serviceExceptionAsync(e);
return false;
}
}
/**
* Returns the statistics for this connection
*/
@Override
public ConnectionStatistics getStatistics() {
return statistics;
}
public MessageAuthorizationPolicy getMessageAuthorizationPolicy() {
return messageAuthorizationPolicy;
}
public void setMessageAuthorizationPolicy(MessageAuthorizationPolicy messageAuthorizationPolicy) {
this.messageAuthorizationPolicy = messageAuthorizationPolicy;
}
@Override
public boolean isManageable() {
return manageable;
}
@Override
public void start() throws Exception {
try {
synchronized (this) {
starting = true;
if (taskRunnerFactory != null) {
taskRunner = taskRunnerFactory.createTaskRunner(this, "ActiveMQ Connection Dispatcher: "
+ getRemoteAddress());
} else {
taskRunner = null;
}
transport.start();
active = true;
BrokerInfo info = connector.getBrokerInfo().copy();
if (connector.isUpdateClusterClients()) {
info.setPeerBrokerInfos(this.broker.getPeerBrokerInfos());
} else {
info.setPeerBrokerInfos(null);
}
dispatchAsync(info);
connector.onStarted(this);
}
} catch (Exception e) {
// Force clean up on an error starting up.
pendingStop = true;
throw e;
} finally {
// stop() can be called from within the above block,
// but we want to be sure start() completes before
// stop() runs, so queue the stop until right now:
setStarting(false);
if (isPendingStop()) {
LOG.debug("Calling the delayed stop() after start() {}", this);
stop();
}
}
}
@Override
public void stop() throws Exception {
// do not stop task the task runner factories (taskRunnerFactory, stopTaskRunnerFactory)
// as their lifecycle is handled elsewhere
stopAsync();
while (!stopped.await(5, TimeUnit.SECONDS)) {
LOG.info("The connection to '{}' is taking a long time to shutdown.", transport.getRemoteAddress());
}
}
public void delayedStop(final int waitTime, final String reason, Throwable cause) {
if (waitTime > 0) {
synchronized (this) {
pendingStop = true;
stopError = cause;
}
try {
stopTaskRunnerFactory.execute(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(waitTime);
stopAsync();
LOG.info("Stopping {} because {}", transport.getRemoteAddress(), reason);
} catch (InterruptedException e) {
}
}
});
} catch (Throwable t) {
LOG.warn("Cannot create stopAsync. This exception will be ignored.", t);
}
}
}
public void stopAsync() {
// If we're in the middle of starting then go no further... for now.
synchronized (this) {
pendingStop = true;
if (starting) {
LOG.debug("stopAsync() called in the middle of start(). Delaying till start completes..");
return;
}
}
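// Only the first caller wins the race to flip the stopping flag and schedule the actual stop.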
if (stopping.compareAndSet(false, true)) {
// Let all the connection contexts know we are shutting down
// so that in progress operations can notice and unblock.
List<TransportConnectionState> connectionStates = listConnectionStates();
for (TransportConnectionState cs : connectionStates) {
ConnectionContext connectionContext = cs.getContext();
if (connectionContext != null) {
connectionContext.getStopping().set(true);
}
}
try {
stopTaskRunnerFactory.execute(new Runnable() {
@Override
public void run() {
serviceLock.writeLock().lock();
try {
doStop();
} catch (Throwable e) {
LOG.debug("Error occurred while shutting down a connection {}", this, e);
} finally {
stopped.countDown();
serviceLock.writeLock().unlock();
}
}
});
} catch (Throwable t) {
LOG.warn("Cannot create async transport stopper thread. This exception is ignored. Not waiting for stop to complete", t);
stopped.countDown();
}
}
}
@Override
public String toString() {
return "Transport Connection to: " + transport.getRemoteAddress();
}
protected void doStop() throws Exception {
LOG.debug("Stopping connection: {}", transport.getRemoteAddress());
connector.onStopped(this);
try {
synchronized (this) {
if (duplexBridge != null) {
duplexBridge.stop();
}
}
} catch (Exception ignore) {
LOG.trace("Exception caught stopping. This exception is ignored.", ignore);
}
try {
transport.stop();
LOG.debug("Stopped transport: {}", transport.getRemoteAddress());
} catch (Exception e) {
LOG.debug("Could not stop transport to {}. This exception is ignored.", transport.getRemoteAddress(), e);
}
if (taskRunner != null) {
taskRunner.shutdown(1);
taskRunner = null;
}
active = false;
// Run the MessageDispatch callbacks so that message references get
// cleaned up.
synchronized (dispatchQueue) {
for (Iterator<Command> iter = dispatchQueue.iterator(); iter.hasNext(); ) {
Command command = iter.next();
if (command.isMessageDispatch()) {
MessageDispatch md = (MessageDispatch) command;
TransmitCallback sub = md.getTransmitCallback();
broker.postProcessDispatch(md);
if (sub != null) {
sub.onFailure();
}
}
}
dispatchQueue.clear();
}
//
// Remove all logical connection associated with this connection
// from the broker.
if (!broker.isStopped()) {
List<TransportConnectionState> connectionStates = listConnectionStates();
for (TransportConnectionState cs : connectionStates) {
cs.getContext().getStopping().set(true);
try {
LOG.debug("Cleaning up connection resources: {}", getRemoteAddress());
processRemoveConnection(cs.getInfo().getConnectionId(), 0l);
} catch (Throwable ignore) {
LOG.debug("Exception caught removing connection resources. This exception is ignored.", ignore);
}
}
}
LOG.debug("Connection Stopped: {}", getRemoteAddress());
}
/**
* @return Returns the blockedCandidate.
*/
public boolean isBlockedCandidate() {
return blockedCandidate;
}
/**
* @param blockedCandidate The blockedCandidate to set.
*/
public void setBlockedCandidate(boolean blockedCandidate) {
this.blockedCandidate = blockedCandidate;
}
/**
* @return Returns the markedCandidate.
*/
public boolean isMarkedCandidate() {
return markedCandidate;
}
/**
* @param markedCandidate The markedCandidate to set.
*/
public void setMarkedCandidate(boolean markedCandidate) {
this.markedCandidate = markedCandidate;
if (!markedCandidate) {
timeStamp = 0;
blockedCandidate = false;
}
}
/**
* @param slow The slow to set.
*/
public void setSlow(boolean slow) {
this.slow = slow;
}
/**
* @return true if the Connection is slow
*/
@Override
public boolean isSlow() {
return slow;
}
/**
* @return true if the Connection is potentially blocked
*/
public boolean isMarkedBlockedCandidate() {
return markedCandidate;
}
/**
* Mark the Connection, so we can deem if it's collectable on the next sweep
*/
public void doMark() {
if (timeStamp == 0) {
timeStamp = System.currentTimeMillis();
}
}
/**
* @return if after being marked, the Connection is still writing
*/
@Override
public boolean isBlocked() {
return blocked;
}
/**
* @return true if the Connection is connected
*/
@Override
public boolean isConnected() {
return connected;
}
/**
* @param blocked The blocked to set.
*/
public void setBlocked(boolean blocked) {
this.blocked = blocked;
}
/**
* @param connected The connected to set.
*/
public void setConnected(boolean connected) {
this.connected = connected;
}
/**
* @return true if the Connection is active
*/
@Override
public boolean isActive() {
return active;
}
/**
* @param active The active to set.
*/
public void setActive(boolean active) {
this.active = active;
}
/**
* @return true if the Connection is starting
*/
public synchronized boolean isStarting() {
return starting;
}
@Override
public synchronized boolean isNetworkConnection() {
return networkConnection;
}
@Override
public boolean isFaultTolerantConnection() {
return this.faultTolerantConnection;
}
protected synchronized void setStarting(boolean starting) {
this.starting = starting;
}
/**
* @return true if the Connection needs to stop
*/
public synchronized boolean isPendingStop() {
return pendingStop;
}
protected synchronized void setPendingStop(boolean pendingStop) {
this.pendingStop = pendingStop;
}
@Override
public Response processBrokerInfo(BrokerInfo info) {
if (info.isSlaveBroker()) {
LOG.error(" Slave Brokers are no longer supported - slave trying to attach is: {}", info.getBrokerName());
} else if (info.isNetworkConnection() && info.isDuplexConnection()) {
// so this TransportConnection is the rear end of a network bridge
// We have been requested to create a two way pipe ...
try {
Properties properties = MarshallingSupport.stringToProperties(info.getNetworkProperties());
Map<String, String> props = createMap(properties);
NetworkBridgeConfiguration config = new NetworkBridgeConfiguration();
IntrospectionSupport.setProperties(config, props, "");
config.setBrokerName(broker.getBrokerName());
// check for existing duplex connection hanging about
// We first look if existing network connection already exists for the same broker Id and network connector name
// It's possible in case of brief network fault to have this transport connector side of the connection always active
// and the duplex network connector side wanting to open a new one
// In this case, the old connection must be broken
String duplexNetworkConnectorId = config.getName() + "@" + info.getBrokerId();
CopyOnWriteArrayList<TransportConnection> connections = this.connector.getConnections();
synchronized (connections) {
for (Iterator<TransportConnection> iter = connections.iterator(); iter.hasNext(); ) {
TransportConnection c = iter.next();
if ((c != this) && (duplexNetworkConnectorId.equals(c.getDuplexNetworkConnectorId()))) {
LOG.warn("Stopping an existing active duplex connection [{}] for network connector ({}).", c, duplexNetworkConnectorId);
c.stopAsync();
// better to wait for a bit rather than get connection id already in use and failure to start new bridge
c.getStopped().await(1, TimeUnit.SECONDS);
}
}
setDuplexNetworkConnectorId(duplexNetworkConnectorId);
}
Transport localTransport = NetworkBridgeFactory.createLocalTransport(broker);
Transport remoteBridgeTransport = transport;
if (! (remoteBridgeTransport instanceof ResponseCorrelator)) {
// the vm transport case is already wrapped
remoteBridgeTransport = new ResponseCorrelator(remoteBridgeTransport);
}
String duplexName = localTransport.toString();
if (duplexName.contains("#")) {
duplexName = duplexName.substring(duplexName.lastIndexOf("#"));
}
MBeanNetworkListener listener = new MBeanNetworkListener(broker.getBrokerService(), config, broker.getBrokerService().createDuplexNetworkConnectorObjectName(duplexName));
listener.setCreatedByDuplex(true);
duplexBridge = NetworkBridgeFactory.createBridge(config, localTransport, remoteBridgeTransport, listener);
duplexBridge.setBrokerService(broker.getBrokerService());
// now turn duplex off this side
info.setDuplexConnection(false);
duplexBridge.setCreatedByDuplex(true);
duplexBridge.duplexStart(this, brokerInfo, info);
LOG.info("Started responder end of duplex bridge {}", duplexNetworkConnectorId);
return null;
} catch (TransportDisposedIOException e) {
LOG.warn("Duplex bridge {} was stopped before it was correctly started.", getDuplexNetworkConnectorId());
return null;
} catch (Exception e) {
LOG.error("Failed to create responder end of duplex network bridge {}", getDuplexNetworkConnectorId(), e);
return null;
}
}
// We only expect to get one broker info command per connection
if (this.brokerInfo != null) {
LOG.warn("Unexpected extra broker info command received: {}", info);
}
this.brokerInfo = info;
networkConnection = true;
List<TransportConnectionState> connectionStates = listConnectionStates();
for (TransportConnectionState cs : connectionStates) {
cs.getContext().setNetworkConnection(true);
}
return null;
}
@SuppressWarnings({"unchecked", "rawtypes"})
private HashMap<String, String> createMap(Properties properties) {
return new HashMap(properties);
}
protected void dispatch(Command command) throws IOException {
try {
setMarkedCandidate(true);
transport.oneway(command);
} finally {
setMarkedCandidate(false);
}
}
@Override
public String getRemoteAddress() {
return transport.getRemoteAddress();
}
public Transport getTransport() {
return transport;
}
@Override
public String getConnectionId() {
List<TransportConnectionState> connectionStates = listConnectionStates();
for (TransportConnectionState cs : connectionStates) {
if (cs.getInfo().getClientId() != null) {
return cs.getInfo().getClientId();
}
return cs.getInfo().getConnectionId().toString();
}
return null;
}
@Override
public void updateClient(ConnectionControl control) {
if (isActive() && isBlocked() == false && isFaultTolerantConnection() && this.wireFormatInfo != null
&& this.wireFormatInfo.getVersion() >= 6) {
dispatchAsync(control);
}
}
public ProducerBrokerExchange getProducerBrokerExchangeIfExists(ProducerInfo producerInfo){
ProducerBrokerExchange result = null;
if (producerInfo != null && producerInfo.getProducerId() != null){
synchronized (producerExchanges){
result = producerExchanges.get(producerInfo.getProducerId());
}
}
return result;
}
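// Exchanges are created lazily on first use and cached per producer id.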
private ProducerBrokerExchange getProducerBrokerExchange(ProducerId id) throws IOException {
ProducerBrokerExchange result = producerExchanges.get(id);
if (result == null) {
synchronized (producerExchanges) {
result = new ProducerBrokerExchange();
TransportConnectionState state = lookupConnectionState(id);
context = state.getContext();
result.setConnectionContext(context);
if (context.isReconnect() || (context.isNetworkConnection() && connector.isAuditNetworkProducers())) {
result.setLastStoredSequenceId(broker.getBrokerService().getPersistenceAdapter().getLastProducerSequenceId(id));
}
SessionState ss = state.getSessionState(id.getParentId());
if (ss != null) {
result.setProducerState(ss.getProducerState(id));
ProducerState producerState = ss.getProducerState(id);
if (producerState != null && producerState.getInfo() != null) {
ProducerInfo info = producerState.getInfo();
result.setMutable(info.getDestination() == null || info.getDestination().isComposite());
}
}
producerExchanges.put(id, result);
}
} else {
context = result.getConnectionContext();
}
return result;
}
private void removeProducerBrokerExchange(ProducerId id) {
synchronized (producerExchanges) {
producerExchanges.remove(id);
}
}
private ConsumerBrokerExchange getConsumerBrokerExchange(ConsumerId id) {
ConsumerBrokerExchange result = consumerExchanges.get(id);
return result;
}
private ConsumerBrokerExchange addConsumerBrokerExchange(ConsumerId id) {
ConsumerBrokerExchange result = consumerExchanges.get(id);
if (result == null) {
synchronized (consumerExchanges) {
result = new ConsumerBrokerExchange();
TransportConnectionState state = lookupConnectionState(id);
context = state.getContext();
result.setConnectionContext(context);
SessionState ss = state.getSessionState(id.getParentId());
if (ss != null) {
ConsumerState cs = ss.getConsumerState(id);
if (cs != null) {
ConsumerInfo info = cs.getInfo();
if (info != null) {
if (info.getDestination() != null && info.getDestination().isPattern()) {
result.setWildcard(true);
}
}
}
}
consumerExchanges.put(id, result);
}
}
return result;
}
private void removeConsumerBrokerExchange(ConsumerId id) {
synchronized (consumerExchanges) {
consumerExchanges.remove(id);
}
}
public int getProtocolVersion() {
return protocolVersion.get();
}
@Override
public Response processControlCommand(ControlCommand command) throws Exception {
String control = command.getCommand();
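// A "shutdown" control command halts the entire JVM hosting the broker.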
if (control != null && control.equals("shutdown")) {
System.exit(0);
}
return null;
}
@Override
public Response processMessageDispatch(MessageDispatch dispatch) throws Exception {
return null;
}
@Override
public Response processConnectionControl(ConnectionControl control) throws Exception {
if (control != null) {
faultTolerantConnection = control.isFaultTolerant();
}
return null;
}
@Override
public Response processConnectionError(ConnectionError error) throws Exception {
return null;
}
@Override
public Response processConsumerControl(ConsumerControl control) throws Exception {
ConsumerBrokerExchange consumerExchange = getConsumerBrokerExchange(control.getConsumerId());
broker.processConsumerControl(consumerExchange, control);
return null;
}
protected synchronized TransportConnectionState registerConnectionState(ConnectionId connectionId,
TransportConnectionState state) {
TransportConnectionState cs = null;
if (!connectionStateRegister.isEmpty() && !connectionStateRegister.doesHandleMultipleConnectionStates()) {
// swap implementations
TransportConnectionStateRegister newRegister = new MapTransportConnectionStateRegister();
newRegister.intialize(connectionStateRegister);
connectionStateRegister = newRegister;
}
cs = connectionStateRegister.registerConnectionState(connectionId, state);
return cs;
}
protected synchronized TransportConnectionState unregisterConnectionState(ConnectionId connectionId) {
return connectionStateRegister.unregisterConnectionState(connectionId);
}
protected synchronized List<TransportConnectionState> listConnectionStates() {
return connectionStateRegister.listConnectionStates();
}
protected synchronized TransportConnectionState lookupConnectionState(String connectionId) {
return connectionStateRegister.lookupConnectionState(connectionId);
}
protected synchronized TransportConnectionState lookupConnectionState(ConsumerId id) {
return connectionStateRegister.lookupConnectionState(id);
}
protected synchronized TransportConnectionState lookupConnectionState(ProducerId id) {
return connectionStateRegister.lookupConnectionState(id);
}
protected synchronized TransportConnectionState lookupConnectionState(SessionId id) {
return connectionStateRegister.lookupConnectionState(id);
}
// public only for testing
public synchronized TransportConnectionState lookupConnectionState(ConnectionId connectionId) {
return connectionStateRegister.lookupConnectionState(connectionId);
}
protected synchronized void setDuplexNetworkConnectorId(String duplexNetworkConnectorId) {
this.duplexNetworkConnectorId = duplexNetworkConnectorId;
}
protected synchronized String getDuplexNetworkConnectorId() {
return this.duplexNetworkConnectorId;
}
public boolean isStopping() {
return stopping.get();
}
protected CountDownLatch getStopped() {
return stopped;
}
private int getProducerCount(ConnectionId connectionId) {
int result = 0;
TransportConnectionState cs = lookupConnectionState(connectionId);
if (cs != null) {
for (SessionId sessionId : cs.getSessionIds()) {
SessionState sessionState = cs.getSessionState(sessionId);
if (sessionState != null) {
result += sessionState.getProducerIds().size();
}
}
}
return result;
}
private int getConsumerCount(ConnectionId connectionId) {
int result = 0;
TransportConnectionState cs = lookupConnectionState(connectionId);
if (cs != null) {
for (SessionId sessionId : cs.getSessionIds()) {
SessionState sessionState = cs.getSessionState(sessionId);
if (sessionState != null) {
result += sessionState.getConsumerIds().size();
}
}
}
return result;
}
public WireFormatInfo getRemoteWireFormatInfo() {
return wireFormatInfo;
}
}
|
./CrossVul/dataset_final_sorted/CWE-264/java/bad_2153_0
|
crossvul-java_data_bad_2091_1
|
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Alan Harder
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.tasks;
import com.gargoylesoftware.htmlunit.html.HtmlForm;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import hudson.maven.MavenModuleSet;
import hudson.maven.MavenModuleSetBuild;
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
import hudson.model.Result;
import hudson.model.Run;
import org.jvnet.hudson.test.ExtractResourceSCM;
import org.jvnet.hudson.test.HudsonTestCase;
import org.jvnet.hudson.test.MockBuilder;
/**
* Tests for hudson.tasks.BuildTrigger
* @author Alan.Harder@sun.com
*/
public class BuildTriggerTest extends HudsonTestCase {
private FreeStyleProject createDownstreamProject() throws Exception {
FreeStyleProject dp = createFreeStyleProject("downstream");
// Hm, no setQuietPeriod, have to submit form..
WebClient webClient = new WebClient();
HtmlPage page = webClient.getPage(dp,"configure");
HtmlForm form = page.getFormByName("config");
form.getInputByName("hasCustomQuietPeriod").click();
form.getInputByName("quiet_period").setValueAttribute("0");
submit(form);
assertEquals("set quiet period", 0, dp.getQuietPeriod());
return dp;
}
private void doTriggerTest(boolean evenWhenUnstable, Result triggerResult,
Result dontTriggerResult) throws Exception {
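// Runs the upstream job twice: first with a result that must not trigger the downstream job, then with one that must.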
FreeStyleProject p = createFreeStyleProject(),
dp = createDownstreamProject();
p.getPublishersList().add(new BuildTrigger("downstream", evenWhenUnstable));
p.getBuildersList().add(new MockBuilder(dontTriggerResult));
jenkins.rebuildDependencyGraph();
// First build should not trigger downstream job
FreeStyleBuild b = p.scheduleBuild2(0).get();
assertNoDownstreamBuild(dp, b);
// Next build should trigger downstream job
p.getBuildersList().replace(new MockBuilder(triggerResult));
b = p.scheduleBuild2(0).get();
assertDownstreamBuild(dp, b);
}
private void assertNoDownstreamBuild(FreeStyleProject dp, Run<?,?> b) throws Exception {
for (int i = 0; i < 3; i++) {
Thread.sleep(200);
assertTrue("downstream build should not run! upstream log: " + getLog(b),
!dp.isInQueue() && !dp.isBuilding() && dp.getLastBuild()==null);
}
}
private void assertDownstreamBuild(FreeStyleProject dp, Run<?,?> b) throws Exception {
// Wait for downstream build
for (int i = 0; dp.getLastBuild()==null && i < 20; i++) Thread.sleep(100);
assertNotNull("downstream build didn't run.. upstream log: " + getLog(b), dp.getLastBuild());
}
public void testBuildTrigger() throws Exception {
doTriggerTest(false, Result.SUCCESS, Result.UNSTABLE);
}
public void testTriggerEvenWhenUnstable() throws Exception {
doTriggerTest(true, Result.UNSTABLE, Result.FAILURE);
}
private void doMavenTriggerTest(boolean evenWhenUnstable) throws Exception {
FreeStyleProject dp = createDownstreamProject();
configureDefaultMaven();
MavenModuleSet m = createMavenProject();
m.getPublishersList().add(new BuildTrigger("downstream", evenWhenUnstable));
if (!evenWhenUnstable) {
// Configure for UNSTABLE
m.setGoals("clean test");
m.setScm(new ExtractResourceSCM(getClass().getResource("maven-test-failure.zip")));
} // otherwise do nothing which gets FAILURE
// First build should not trigger downstream project
MavenModuleSetBuild b = m.scheduleBuild2(0).get();
assertNoDownstreamBuild(dp, b);
if (evenWhenUnstable) {
// Configure for UNSTABLE
m.setGoals("clean test");
m.setScm(new ExtractResourceSCM(getClass().getResource("maven-test-failure.zip")));
} else {
// Configure for SUCCESS
m.setGoals("clean");
m.setScm(new ExtractResourceSCM(getClass().getResource("maven-empty.zip")));
}
// Next build should trigger downstream project
b = m.scheduleBuild2(0).get();
assertDownstreamBuild(dp, b);
}
public void testMavenBuildTrigger() throws Exception {
doMavenTriggerTest(false);
}
public void testMavenTriggerEvenWhenUnstable() throws Exception {
doMavenTriggerTest(true);
}
}
|
./CrossVul/dataset_final_sorted/CWE-264/java/bad_2091_1
|
crossvul-java_data_good_2293_1
|
package org.uberfire.io.regex;
import java.net.URI;
import java.util.Collection;
import org.uberfire.java.nio.file.Path;
import static org.uberfire.commons.validation.Preconditions.*;
public final class AntPathMatcher {
private static org.uberfire.commons.regex.util.AntPathMatcher matcher = new org.uberfire.commons.regex.util.AntPathMatcher();
public static boolean filter( final Collection<String> includes,
final Collection<String> excludes,
final Path path ) {
checkNotNull( "includes", includes );
checkNotNull( "excludes", excludes );
checkNotNull( "path", path );
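// A path passes the filter when it matches at least one include (if any are given) and no exclude pattern.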
if ( includes.isEmpty() && excludes.isEmpty() ) {
return true;
} else if ( includes.isEmpty() ) {
return !( excludes( excludes, path ) );
} else if ( excludes.isEmpty() ) {
return includes( includes, path );
}
return includes( includes, path ) && !( excludes( excludes, path ) );
}
public static boolean filter( final Collection<String> includes,
final Collection<String> excludes,
final URI uri ) {
checkNotNull( "includes", includes );
checkNotNull( "excludes", excludes );
checkNotNull( "uri", uri );
if ( includes.isEmpty() && excludes.isEmpty() ) {
return true;
} else if ( includes.isEmpty() ) {
return !( excludes( excludes, uri ) );
} else if ( excludes.isEmpty() ) {
return includes( includes, uri );
}
return includes( includes, uri ) && !( excludes( excludes, uri ) );
}
public static boolean includes( final Collection<String> patterns,
final Path path ) {
checkNotNull( "patterns", patterns );
checkNotNull( "path", path );
return matches( patterns, path );
}
public static boolean includes( final Collection<String> patterns,
final URI uri ) {
checkNotNull( "patterns", patterns );
checkNotNull( "uri", uri );
return matches( patterns, uri );
}
public static boolean excludes( final Collection<String> patterns,
final URI uri ) {
checkNotNull( "patterns", patterns );
checkNotNull( "uri", uri );
return matches( patterns, uri );
}
public static boolean excludes( final Collection<String> patterns,
final Path path ) {
checkNotNull( "patterns", patterns );
checkNotNull( "path", path );
return matches( patterns, path );
}
private static boolean matches( final Collection<String> patterns,
final Path path ) {
return matches( patterns, path.toUri() );
}
private static boolean matches( final Collection<String> patterns,
final URI uri ) {
for ( final String pattern : patterns ) {
if ( matcher.match( pattern, uri.toString() ) ) {
return true;
}
}
return false;
}
}
|
./CrossVul/dataset_final_sorted/CWE-264/java/good_2293_1
|
crossvul-java_data_good_4708_0
|
package org.orbeon.oxf.xml.xerces;
import org.orbeon.oxf.common.OXFException;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import java.util.*;
/**
* Boasts a couple of improvements over the 'stock' xerces parser factory.
*
* o Doesn't create a new parser every time one calls setFeature or getFeature. Stock one
* has to do this because valid feature set is encapsulated in the parser code.
*
* o Creates a XercesJAXPSAXParser instead of SaxParserImpl. See XercesJAXPSAXParser for
* why this is an improvement.
*
* o The improvements cut the time it takes to a SAX parser via JAXP in
* half and reduce the amount of garbage created when accessing '/' in
* the examples app from 9019216 bytes to 8402880 bytes.
*/
public class XercesSAXParserFactoryImpl extends SAXParserFactory {
private static final Collection recognizedFeaturesNonValidatingXInclude;
private static final Map defaultFeaturesNonValidatingXInclude;
private static final Collection recognizedFeaturesNonValidatingNoXInclude;
private static final Map defaultFeaturesNonValidatingNoXInclude;
private static final Collection recognizedFeaturesValidatingXInclude;
private static final Map defaultFeaturesValidatingXInclude;
private static final Collection recognizedFeaturesValidatingNoXInclude;
private static final Map defaultFeaturesValidatingNoXInclude;
static {
{
final OrbeonParserConfiguration configuration = XercesSAXParser.makeConfig(false, true);
final Collection recognizedFeatures = configuration.getRecognizedFeatures();
recognizedFeaturesNonValidatingXInclude = Collections.unmodifiableCollection(recognizedFeatures);
defaultFeaturesNonValidatingXInclude = configuration.getFeatures();
addDefaultFeatures(defaultFeaturesNonValidatingXInclude);
}
{
final OrbeonParserConfiguration configuration = XercesSAXParser.makeConfig(false, false);
final Collection features = configuration.getRecognizedFeatures();
recognizedFeaturesNonValidatingNoXInclude = Collections.unmodifiableCollection(features);
defaultFeaturesNonValidatingNoXInclude = configuration.getFeatures();
addDefaultFeatures(defaultFeaturesNonValidatingNoXInclude);
}
{
final OrbeonParserConfiguration configuration = XercesSAXParser.makeConfig(true, true);
final Collection features = configuration.getRecognizedFeatures();
recognizedFeaturesValidatingXInclude = Collections.unmodifiableCollection(features);
defaultFeaturesValidatingXInclude = configuration.getFeatures();
addDefaultFeatures(defaultFeaturesValidatingXInclude);
}
{
final OrbeonParserConfiguration configuration = XercesSAXParser.makeConfig(true, false);
final Collection features = configuration.getRecognizedFeatures();
recognizedFeaturesValidatingNoXInclude = Collections.unmodifiableCollection(features);
defaultFeaturesValidatingNoXInclude = configuration.getFeatures();
addDefaultFeatures(defaultFeaturesValidatingNoXInclude);
}
}
private static void addDefaultFeatures(Map features) {
features.put("http://xml.org/sax/features/namespaces", Boolean.TRUE);
features.put("http://xml.org/sax/features/namespace-prefixes", Boolean.FALSE);
// For security purposes, disable external entities
features.put("http://xml.org/sax/features/external-general-entities", Boolean.FALSE);
features.put("http://xml.org/sax/features/external-parameter-entities", Boolean.FALSE);
}
private final Hashtable features;
private final boolean validating;
private final boolean handleXInclude;
public XercesSAXParserFactoryImpl() {
this(false, false);
}
public XercesSAXParserFactoryImpl(boolean validating, boolean handleXInclude) {
this.validating = validating;
this.handleXInclude = handleXInclude;
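// Pick the precomputed default feature set matching the validating/XInclude combination.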
if (!validating) {
features = new Hashtable(handleXInclude ? defaultFeaturesNonValidatingXInclude : defaultFeaturesNonValidatingNoXInclude);
} else {
features = new Hashtable(handleXInclude ? defaultFeaturesValidatingXInclude : defaultFeaturesValidatingNoXInclude);
}
setNamespaceAware(true); // this is needed by some tools in addition to the feature
}
public boolean getFeature(final String key) throws SAXNotRecognizedException {
if (!getRecognizedFeatures().contains(key)) throw new SAXNotRecognizedException(key);
return features.get(key) == Boolean.TRUE;
}
public void setFeature(final String key, final boolean val) throws SAXNotRecognizedException {
if (!getRecognizedFeatures().contains(key)) throw new SAXNotRecognizedException(key);
features.put(key, val ? Boolean.TRUE : Boolean.FALSE);
}
public SAXParser newSAXParser() {
final SAXParser ret;
try {
ret = new XercesJAXPSAXParser(this, features, validating, handleXInclude);
} catch (final SAXException se) {
// Translate to ParserConfigurationException
throw new OXFException(se); // so we see a decent stack trace!
}
return ret;
}
private Collection getRecognizedFeatures() {
if (!validating) {
return handleXInclude ? recognizedFeaturesNonValidatingXInclude : recognizedFeaturesNonValidatingNoXInclude;
} else {
return handleXInclude ? recognizedFeaturesValidatingXInclude : recognizedFeaturesValidatingNoXInclude;
}
}
}
|
./CrossVul/dataset_final_sorted/CWE-264/java/good_4708_0
|
crossvul-java_data_bad_2091_0
| "/*\n * The MIT License\n * \n * Copyright (c) 2004-2011, Sun Microsystems, Inc., Kohsuke Kawaguchi,(...TRUNCATED)
|
./CrossVul/dataset_final_sorted/CWE-264/java/bad_2091_0
|
crossvul-java_data_bad_2293_3
| "/*\n * Copyright 2012 JBoss Inc\n *\n * Licensed under the Apache License, Version 2.0 (the \"Licen(...TRUNCATED)
|
./CrossVul/dataset_final_sorted/CWE-264/java/bad_2293_3
|
crossvul-java_data_good_2091_0
| "/*\n * The MIT License\n * \n * Copyright (c) 2004-2011, Sun Microsystems, Inc., Kohsuke Kawaguchi,(...TRUNCATED)
|
./CrossVul/dataset_final_sorted/CWE-264/java/good_2091_0
|