method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
sequence | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
sequence | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
---|---|---|---|---|---|---|---|---|---|---|---|
private void updateOffsets() {
if (this.blockAnchor.equals(RectangleAnchor.BOTTOM_LEFT)) {
this.xOffset = 0.0;
this.yOffset = 0.0;
}
else if (this.blockAnchor.equals(RectangleAnchor.BOTTOM)) {
this.xOffset = -this.blockWidth / 2.0;
this.yOffset = 0.0;
}
else if (this.blockAnchor.equals(RectangleAnchor.BOTTOM_RIGHT)) {
this.xOffset = -this.blockWidth;
this.yOffset = 0.0;
}
else if (this.blockAnchor.equals(RectangleAnchor.LEFT)) {
this.xOffset = 0.0;
this.yOffset = -this.blockHeight / 2.0;
}
else if (this.blockAnchor.equals(RectangleAnchor.CENTER)) {
this.xOffset = -this.blockWidth / 2.0;
this.yOffset = -this.blockHeight / 2.0;
}
else if (this.blockAnchor.equals(RectangleAnchor.RIGHT)) {
this.xOffset = -this.blockWidth;
this.yOffset = -this.blockHeight / 2.0;
}
else if (this.blockAnchor.equals(RectangleAnchor.TOP_LEFT)) {
this.xOffset = 0.0;
this.yOffset = -this.blockHeight;
}
else if (this.blockAnchor.equals(RectangleAnchor.TOP)) {
this.xOffset = -this.blockWidth / 2.0;
this.yOffset = -this.blockHeight;
}
else if (this.blockAnchor.equals(RectangleAnchor.TOP_RIGHT)) {
this.xOffset = -this.blockWidth;
this.yOffset = -this.blockHeight;
}
}
| void function() { if (this.blockAnchor.equals(RectangleAnchor.BOTTOM_LEFT)) { this.xOffset = 0.0; this.yOffset = 0.0; } else if (this.blockAnchor.equals(RectangleAnchor.BOTTOM)) { this.xOffset = -this.blockWidth / 2.0; this.yOffset = 0.0; } else if (this.blockAnchor.equals(RectangleAnchor.BOTTOM_RIGHT)) { this.xOffset = -this.blockWidth; this.yOffset = 0.0; } else if (this.blockAnchor.equals(RectangleAnchor.LEFT)) { this.xOffset = 0.0; this.yOffset = -this.blockHeight / 2.0; } else if (this.blockAnchor.equals(RectangleAnchor.CENTER)) { this.xOffset = -this.blockWidth / 2.0; this.yOffset = -this.blockHeight / 2.0; } else if (this.blockAnchor.equals(RectangleAnchor.RIGHT)) { this.xOffset = -this.blockWidth; this.yOffset = -this.blockHeight / 2.0; } else if (this.blockAnchor.equals(RectangleAnchor.TOP_LEFT)) { this.xOffset = 0.0; this.yOffset = -this.blockHeight; } else if (this.blockAnchor.equals(RectangleAnchor.TOP)) { this.xOffset = -this.blockWidth / 2.0; this.yOffset = -this.blockHeight; } else if (this.blockAnchor.equals(RectangleAnchor.TOP_RIGHT)) { this.xOffset = -this.blockWidth; this.yOffset = -this.blockHeight; } } | /**
* Updates the offsets to take into account the block width, height and
* anchor.
*/ | Updates the offsets to take into account the block width, height and anchor | updateOffsets | {
"repo_name": "sebkur/JFreeChart",
"path": "src/main/java/org/jfree/chart/renderer/xy/XYBlockRenderer.java",
"license": "lgpl-3.0",
"size": 15242
} | [
"org.jfree.ui.RectangleAnchor"
] | import org.jfree.ui.RectangleAnchor; | import org.jfree.ui.*; | [
"org.jfree.ui"
] | org.jfree.ui; | 1,773,523 |
static void assertEqualsText(List<CharSequence> expectedText,
List<CharSequence> receivedText ) {
String message = "text has incorrect value";
TestCase.assertEquals(message, expectedText.size(), receivedText.size());
Iterator<CharSequence> expectedTextIterator = expectedText.iterator();
Iterator<CharSequence> receivedTextIterator = receivedText.iterator();
for (int i = 0; i < expectedText.size(); i++) {
// compare the string representation
TestCase.assertEquals(message, expectedTextIterator.next().toString(),
receivedTextIterator.next().toString());
}
} | static void assertEqualsText(List<CharSequence> expectedText, List<CharSequence> receivedText ) { String message = STR; TestCase.assertEquals(message, expectedText.size(), receivedText.size()); Iterator<CharSequence> expectedTextIterator = expectedText.iterator(); Iterator<CharSequence> receivedTextIterator = receivedText.iterator(); for (int i = 0; i < expectedText.size(); i++) { TestCase.assertEquals(message, expectedTextIterator.next().toString(), receivedTextIterator.next().toString()); } } | /**
* Compares the text of the <code>expectedEvent</code> and
* <code>receivedEvent</code> by comparing the string representation of the
* corresponding {@link CharSequence}s.
*/ | Compares the text of the <code>expectedEvent</code> and <code>receivedEvent</code> by comparing the string representation of the corresponding <code>CharSequence</code>s | assertEqualsText | {
"repo_name": "rex-xxx/mt6572_x201",
"path": "cts/tests/tests/accessibility/src/android/view/accessibility/cts/AccessibilityRecordTest.java",
"license": "gpl-2.0",
"size": 10061
} | [
"java.util.Iterator",
"java.util.List",
"junit.framework.TestCase"
] | import java.util.Iterator; import java.util.List; import junit.framework.TestCase; | import java.util.*; import junit.framework.*; | [
"java.util",
"junit.framework"
] | java.util; junit.framework; | 943,267 |
public DiskEntry.RecoveredEntry createRecoveredEntry(byte[] valueBytes, int valueLength,
byte userBits, long oplogId, long offsetInOplog, long oplogKeyId, boolean recoverValue,
KnownVersion version, ByteArrayDataInput in) {
DiskEntry.RecoveredEntry re;
if (recoverValue || EntryBits.isAnyInvalid(userBits) || EntryBits.isTombstone(userBits)) {
Object value;
if (EntryBits.isLocalInvalid(userBits)) {
value = Token.LOCAL_INVALID;
valueLength = 0;
} else if (EntryBits.isInvalid(userBits)) {
value = Token.INVALID;
valueLength = 0;
} else if (EntryBits.isSerialized(userBits)) {
value = DiskEntry.Helper.readSerializedValue(valueBytes, version, in, false,
getParent().getCache());
} else if (EntryBits.isTombstone(userBits)) {
value = Token.TOMBSTONE;
} else {
value = valueBytes;
}
re = new DiskEntry.RecoveredEntry(oplogKeyId, oplogId, offsetInOplog, userBits, valueLength,
value);
} else {
re = new DiskEntry.RecoveredEntry(oplogKeyId, oplogId, offsetInOplog, userBits, valueLength);
}
return re;
} | DiskEntry.RecoveredEntry function(byte[] valueBytes, int valueLength, byte userBits, long oplogId, long offsetInOplog, long oplogKeyId, boolean recoverValue, KnownVersion version, ByteArrayDataInput in) { DiskEntry.RecoveredEntry re; if (recoverValue EntryBits.isAnyInvalid(userBits) EntryBits.isTombstone(userBits)) { Object value; if (EntryBits.isLocalInvalid(userBits)) { value = Token.LOCAL_INVALID; valueLength = 0; } else if (EntryBits.isInvalid(userBits)) { value = Token.INVALID; valueLength = 0; } else if (EntryBits.isSerialized(userBits)) { value = DiskEntry.Helper.readSerializedValue(valueBytes, version, in, false, getParent().getCache()); } else if (EntryBits.isTombstone(userBits)) { value = Token.TOMBSTONE; } else { value = valueBytes; } re = new DiskEntry.RecoveredEntry(oplogKeyId, oplogId, offsetInOplog, userBits, valueLength, value); } else { re = new DiskEntry.RecoveredEntry(oplogKeyId, oplogId, offsetInOplog, userBits, valueLength); } return re; } | /**
* TODO soplog - This method is public just to test soplog recovery
*/ | TODO soplog - This method is public just to test soplog recovery | createRecoveredEntry | {
"repo_name": "jdeppe-pivotal/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/Oplog.java",
"license": "apache-2.0",
"size": 272626
} | [
"org.apache.geode.internal.cache.entries.DiskEntry",
"org.apache.geode.internal.serialization.ByteArrayDataInput",
"org.apache.geode.internal.serialization.KnownVersion"
] | import org.apache.geode.internal.cache.entries.DiskEntry; import org.apache.geode.internal.serialization.ByteArrayDataInput; import org.apache.geode.internal.serialization.KnownVersion; | import org.apache.geode.internal.cache.entries.*; import org.apache.geode.internal.serialization.*; | [
"org.apache.geode"
] | org.apache.geode; | 2,486,223 |
protected void removeExcessiveInProgressFiles(Deque<Exchange> exchanges, int limit) {
// remove the file from the in progress list in case the batch was limited by max messages per poll
while (exchanges.size() > limit) {
// must remove last
Exchange exchange = exchanges.removeLast();
GenericFile<?> file = exchange.getProperty(FileComponent.FILE_EXCHANGE_FILE, GenericFile.class);
String key = file.getAbsoluteFilePath();
endpoint.getInProgressRepository().remove(key);
}
} | void function(Deque<Exchange> exchanges, int limit) { while (exchanges.size() > limit) { Exchange exchange = exchanges.removeLast(); GenericFile<?> file = exchange.getProperty(FileComponent.FILE_EXCHANGE_FILE, GenericFile.class); String key = file.getAbsoluteFilePath(); endpoint.getInProgressRepository().remove(key); } } | /**
* Drain any in progress files as we are done with this batch
*
* @param exchanges the exchanges
* @param limit the limit
*/ | Drain any in progress files as we are done with this batch | removeExcessiveInProgressFiles | {
"repo_name": "shuliangtao/apache-camel-2.13.0-src",
"path": "camel-core/src/main/java/org/apache/camel/component/file/GenericFileConsumer.java",
"license": "apache-2.0",
"size": 25569
} | [
"java.util.Deque",
"org.apache.camel.Exchange"
] | import java.util.Deque; import org.apache.camel.Exchange; | import java.util.*; import org.apache.camel.*; | [
"java.util",
"org.apache.camel"
] | java.util; org.apache.camel; | 1,704,923 |
@SideOnly(Side.CLIENT)
public double getYCoordinate()
{
return (double)this.yCoord;
} | @SideOnly(Side.CLIENT) double function() { return (double)this.yCoord; } | /**
* Gets the y coordinate to spawn the particle.
*/ | Gets the y coordinate to spawn the particle | getYCoordinate | {
"repo_name": "tomtomtom09/CampCraft",
"path": "build/tmp/recompileMc/sources/net/minecraft/network/play/server/S2APacketParticles.java",
"license": "gpl-3.0",
"size": 5553
} | [
"net.minecraftforge.fml.relauncher.Side",
"net.minecraftforge.fml.relauncher.SideOnly"
] | import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; | import net.minecraftforge.fml.relauncher.*; | [
"net.minecraftforge.fml"
] | net.minecraftforge.fml; | 2,755,610 |
String createEnglishMailText(ImportRun importRun, ZoneId zone)
{
ZonedDateTime start = importRun.getStartDate().atZone(zone);
ZonedDateTime end = importRun.getEndDate().atZone(zone);
String startDateTimeString = ofLocalizedDateTime(FULL).withLocale(ENGLISH).format(start);
String endTimeString = ofLocalizedTime(MEDIUM).withLocale(ENGLISH).format(end);
return String.format("The import started by you on %1s finished on %2s with status: %3s\nMessage:\n%4s",
startDateTimeString, endTimeString, importRun.getStatus(), importRun.getMessage());
} | String createEnglishMailText(ImportRun importRun, ZoneId zone) { ZonedDateTime start = importRun.getStartDate().atZone(zone); ZonedDateTime end = importRun.getEndDate().atZone(zone); String startDateTimeString = ofLocalizedDateTime(FULL).withLocale(ENGLISH).format(start); String endTimeString = ofLocalizedTime(MEDIUM).withLocale(ENGLISH).format(end); return String.format(STR, startDateTimeString, endTimeString, importRun.getStatus(), importRun.getMessage()); } | /**
* Creates an English mail message describing a finished {@link ImportRun}.
* Formats the run's start and end times using {@link ZoneId#systemDefault()}.
*
* @param importRun the ImportRun to describe, it should have non-null start and end dates.
* @return String containing the mail message.
*/ | Creates an English mail message describing a finished <code>ImportRun</code>. Formats the run's start and end times using <code>ZoneId#systemDefault()</code> | createEnglishMailText | {
"repo_name": "djvanenckevort/molgenis",
"path": "molgenis-data-import/src/main/java/org/molgenis/data/importer/ImportRunService.java",
"license": "lgpl-3.0",
"size": 4754
} | [
"java.time.ZoneId",
"java.time.ZonedDateTime",
"java.time.format.DateTimeFormatter"
] | import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; | import java.time.*; import java.time.format.*; | [
"java.time"
] | java.time; | 262,033 |
void enterNumber(@NotNull EsperEPL2GrammarParser.NumberContext ctx);
void exitNumber(@NotNull EsperEPL2GrammarParser.NumberContext ctx); | void enterNumber(@NotNull EsperEPL2GrammarParser.NumberContext ctx); void exitNumber(@NotNull EsperEPL2GrammarParser.NumberContext ctx); | /**
* Exit a parse tree produced by {@link EsperEPL2GrammarParser#number}.
* @param ctx the parse tree
*/ | Exit a parse tree produced by <code>EsperEPL2GrammarParser#number</code> | exitNumber | {
"repo_name": "georgenicoll/esper",
"path": "esper/src/main/java/com/espertech/esper/epl/generated/EsperEPL2GrammarListener.java",
"license": "gpl-2.0",
"size": 114105
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 2,636,906 |
private void checkAbstract(@NotNull final String... methodNames) {
final String[] modules = {"Class", "SuperClass"};
configureMultiFile(modules);
doPullUp("Child", "Parent", true, methodNames);
checkMultiFile(modules);
} | void function(@NotNull final String... methodNames) { final String[] modules = {"Class", STR}; configureMultiFile(modules); doPullUp("Child", STR, true, methodNames); checkMultiFile(modules); } | /**
* Moves methods fromn Child to Parent and make them abstract
* @param methodNames methods to check
*/ | Moves methods fromn Child to Parent and make them abstract | checkAbstract | {
"repo_name": "ivan-fedorov/intellij-community",
"path": "python/testSrc/com/jetbrains/python/refactoring/classes/pullUp/PyPullUpTest.java",
"license": "apache-2.0",
"size": 6556
} | [
"org.jetbrains.annotations.NotNull"
] | import org.jetbrains.annotations.NotNull; | import org.jetbrains.annotations.*; | [
"org.jetbrains.annotations"
] | org.jetbrains.annotations; | 2,179,672 |
private void rewriteQueryFromSuggestion(int position) {
CharSequence oldQuery = mQueryTextView.getText();
Cursor c = mSuggestionsAdapter.getCursor();
if (c == null) {
return;
}
if (c.moveToPosition(position)) {
// Get the new query from the suggestion.
CharSequence newQuery = mSuggestionsAdapter.convertToString(c);
if (newQuery != null) {
// The suggestion rewrites the query.
// Update the text field, without getting new suggestions.
setQuery(newQuery);
} else {
// The suggestion does not rewrite the query, restore the user's query.
setQuery(oldQuery);
}
} else {
// We got a bad position, restore the user's query.
setQuery(oldQuery);
}
} | void function(int position) { CharSequence oldQuery = mQueryTextView.getText(); Cursor c = mSuggestionsAdapter.getCursor(); if (c == null) { return; } if (c.moveToPosition(position)) { CharSequence newQuery = mSuggestionsAdapter.convertToString(c); if (newQuery != null) { setQuery(newQuery); } else { setQuery(oldQuery); } } else { setQuery(oldQuery); } } | /**
* Query rewriting.
*/ | Query rewriting | rewriteQueryFromSuggestion | {
"repo_name": "Myanmar-Hub/collabra-devcon",
"path": "Sherlock/src/com/actionbarsherlock/widget/SearchView.java",
"license": "apache-2.0",
"size": 71352
} | [
"android.database.Cursor"
] | import android.database.Cursor; | import android.database.*; | [
"android.database"
] | android.database; | 2,236,129 |
public EAttribute getClearanceTag_DeenergizeReqFlag() {
return (EAttribute)getClearanceTag().getEStructuralFeatures().get(2);
} | EAttribute function() { return (EAttribute)getClearanceTag().getEStructuralFeatures().get(2); } | /**
* Returns the meta object for the attribute '{@link CIM15.IEC61970.Outage.ClearanceTag#isDeenergizeReqFlag <em>Deenergize Req Flag</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Deenergize Req Flag</em>'.
* @see CIM15.IEC61970.Outage.ClearanceTag#isDeenergizeReqFlag()
* @see #getClearanceTag()
* @generated
*/ | Returns the meta object for the attribute '<code>CIM15.IEC61970.Outage.ClearanceTag#isDeenergizeReqFlag Deenergize Req Flag</code>'. | getClearanceTag_DeenergizeReqFlag | {
"repo_name": "SES-fortiss/SmartGridCoSimulation",
"path": "core/cim15/src/CIM15/IEC61970/Outage/OutagePackage.java",
"license": "apache-2.0",
"size": 60227
} | [
"org.eclipse.emf.ecore.EAttribute"
] | import org.eclipse.emf.ecore.EAttribute; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,568,625 |
@Test()
public void testConstructor3WithoutAttribute()
throws Exception
{
ASN1Element[] elements =
{
new ASN1Enumerated(0)
};
ServerSideSortResponseControl c =
new ServerSideSortResponseControl("1.2.840.113556.1.4.474", false,
new ASN1OctetString(new ASN1Sequence(elements).encode()));
assertEquals(c.getResultCode(), ResultCode.SUCCESS);
assertNull(c.getAttributeName());
assertFalse(c.isCritical());
assertNotNull(c.getControlName());
assertNotNull(c.toString());
} | @Test() void function() throws Exception { ASN1Element[] elements = { new ASN1Enumerated(0) }; ServerSideSortResponseControl c = new ServerSideSortResponseControl(STR, false, new ASN1OctetString(new ASN1Sequence(elements).encode())); assertEquals(c.getResultCode(), ResultCode.SUCCESS); assertNull(c.getAttributeName()); assertFalse(c.isCritical()); assertNotNull(c.getControlName()); assertNotNull(c.toString()); } | /**
* Tests the third constructor with a control that does not have an attribute.
*
* @throws Exception If an unexpected problem occurs.
*/ | Tests the third constructor with a control that does not have an attribute | testConstructor3WithoutAttribute | {
"repo_name": "UnboundID/ldapsdk",
"path": "tests/unit/src/com/unboundid/ldap/sdk/controls/ServerSideSortResponseControlTestCase.java",
"license": "gpl-2.0",
"size": 9234
} | [
"com.unboundid.asn1.ASN1Element",
"com.unboundid.asn1.ASN1Enumerated",
"com.unboundid.asn1.ASN1OctetString",
"com.unboundid.asn1.ASN1Sequence",
"com.unboundid.ldap.sdk.ResultCode",
"org.testng.annotations.Test"
] | import com.unboundid.asn1.ASN1Element; import com.unboundid.asn1.ASN1Enumerated; import com.unboundid.asn1.ASN1OctetString; import com.unboundid.asn1.ASN1Sequence; import com.unboundid.ldap.sdk.ResultCode; import org.testng.annotations.Test; | import com.unboundid.asn1.*; import com.unboundid.ldap.sdk.*; import org.testng.annotations.*; | [
"com.unboundid.asn1",
"com.unboundid.ldap",
"org.testng.annotations"
] | com.unboundid.asn1; com.unboundid.ldap; org.testng.annotations; | 1,174,148 |
public List<top.zbeboy.isy.domain.tables.pojos.InternshipChangeCompanyHistory> fetchByCompanyTel(String... values) {
return fetch(InternshipChangeCompanyHistory.INTERNSHIP_CHANGE_COMPANY_HISTORY.COMPANY_TEL, values);
} | List<top.zbeboy.isy.domain.tables.pojos.InternshipChangeCompanyHistory> function(String... values) { return fetch(InternshipChangeCompanyHistory.INTERNSHIP_CHANGE_COMPANY_HISTORY.COMPANY_TEL, values); } | /**
* Fetch records that have <code>company_tel IN (values)</code>
*/ | Fetch records that have <code>company_tel IN (values)</code> | fetchByCompanyTel | {
"repo_name": "zbeboy/ISY",
"path": "src/main/java/top/zbeboy/isy/domain/tables/daos/InternshipChangeCompanyHistoryDao.java",
"license": "mit",
"size": 4913
} | [
"java.util.List",
"top.zbeboy.isy.domain.tables.InternshipChangeCompanyHistory"
] | import java.util.List; import top.zbeboy.isy.domain.tables.InternshipChangeCompanyHistory; | import java.util.*; import top.zbeboy.isy.domain.tables.*; | [
"java.util",
"top.zbeboy.isy"
] | java.util; top.zbeboy.isy; | 1,805,523 |
public void getFilter(final String userId, final String filterId, final ApiCallback<FilterBody> callback) {
final String description = "getFilter userId : " + userId + " filterId : " + filterId; | void function(final String userId, final String filterId, final ApiCallback<FilterBody> callback) { final String description = STR + userId + STR + filterId; | /**
* Get a user's filter by filterId
*
* @param userId the user id
* @param filterId the filter id
* @param callback on success callback containing a User object with populated filterbody
*/ | Get a user's filter by filterId | getFilter | {
"repo_name": "matrix-org/matrix-android-sdk",
"path": "matrix-sdk/src/main/java/org/matrix/androidsdk/rest/client/FilterRestClient.java",
"license": "apache-2.0",
"size": 3091
} | [
"org.matrix.androidsdk.core.callback.ApiCallback",
"org.matrix.androidsdk.rest.model.filter.FilterBody"
] | import org.matrix.androidsdk.core.callback.ApiCallback; import org.matrix.androidsdk.rest.model.filter.FilterBody; | import org.matrix.androidsdk.core.callback.*; import org.matrix.androidsdk.rest.model.filter.*; | [
"org.matrix.androidsdk"
] | org.matrix.androidsdk; | 2,115,353 |
@FlakyTest
public void testDangerousDownload() throws Exception {
loadUrl(TestHttpServerClient.getUrl("chrome/test/data/android/download/dangerous.html"));
waitForFocus();
View currentView = getActivity().getActivityTab().getView();
singleClickView(currentView);
assertPollForInfoBarSize(1);
assertTrue("OK button wasn't found", InfoBarUtil.clickPrimaryButton(getInfoBars().get(0)));
assertTrue(waitForGetDownloadToFinish());
checkLastDownload("test.apk");
} | void function() throws Exception { loadUrl(TestHttpServerClient.getUrl(STR)); waitForFocus(); View currentView = getActivity().getActivityTab().getView(); singleClickView(currentView); assertPollForInfoBarSize(1); assertTrue(STR, InfoBarUtil.clickPrimaryButton(getInfoBars().get(0))); assertTrue(waitForGetDownloadToFinish()); checkLastDownload(STR); } | /**
* Bug http://crbug/286315
*
* @MediumTest
* @Feature({"Downloads"})
*/ | Bug HREF | testDangerousDownload | {
"repo_name": "CapOM/ChromiumGStreamerBackend",
"path": "chrome/android/javatests/src/org/chromium/chrome/browser/download/DownloadTest.java",
"license": "bsd-3-clause",
"size": 15687
} | [
"android.view.View",
"org.chromium.chrome.test.util.InfoBarUtil",
"org.chromium.chrome.test.util.TestHttpServerClient"
] | import android.view.View; import org.chromium.chrome.test.util.InfoBarUtil; import org.chromium.chrome.test.util.TestHttpServerClient; | import android.view.*; import org.chromium.chrome.test.util.*; | [
"android.view",
"org.chromium.chrome"
] | android.view; org.chromium.chrome; | 465,115 |
@Test
public void testBoolean2Float()
{
try
{
Message message = senderSession.createMessage();
// store a value that can't be converted to float
message.setBooleanProperty("prop", true);
message.getFloatProperty("prop");
Assert.fail("sec. 3.5.4 The unmarked cases [of Table 0-4] should raise a JMS MessageFormatException.\n");
}
catch (MessageFormatException e)
{
}
catch (JMSException e)
{
fail(e);
}
} | void function() { try { Message message = senderSession.createMessage(); message.setBooleanProperty("prop", true); message.getFloatProperty("prop"); Assert.fail(STR); } catch (MessageFormatException e) { } catch (JMSException e) { fail(e); } } | /**
* if a property is set as a <code>boolean</code>,
* to get is as a <code>float</code> throws a <code>javax.jms.MessageFormatException</code>.
*/ | if a property is set as a <code>boolean</code>, to get is as a <code>float</code> throws a <code>javax.jms.MessageFormatException</code> | testBoolean2Float | {
"repo_name": "jbertram/activemq-artemis-old",
"path": "tests/joram-tests/src/test/java/org/objectweb/jtests/jms/conform/message/properties/MessagePropertyConversionTest.java",
"license": "apache-2.0",
"size": 45746
} | [
"javax.jms.JMSException",
"javax.jms.Message",
"javax.jms.MessageFormatException",
"org.junit.Assert"
] | import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageFormatException; import org.junit.Assert; | import javax.jms.*; import org.junit.*; | [
"javax.jms",
"org.junit"
] | javax.jms; org.junit; | 2,457,758 |
protected byte[] updatePageArray(long pageNumber,
byte[] pageData,
byte[] encryptionBuf,
boolean encryptWithNewEngine)
throws StandardException, IOException
{
if (pageNumber == FIRST_ALLOC_PAGE_NUMBER)
{
// write header into the alloc page array regardless of dirty
// bit because the alloc page have zero'ed out the borrowed
// space
writeHeader(getIdentity(), pageData);
if (SanityManager.DEBUG)
{
if (FormatIdUtil.readFormatIdInteger(pageData) != AllocPage.FORMAT_NUMBER)
SanityManager.THROWASSERT(
"expect " +
AllocPage.FORMAT_NUMBER +
"got " +
FormatIdUtil.readFormatIdInteger(pageData));
}
return pageData;
}
else
{
if (encryptionBuf != null &&
(dataFactory.databaseEncrypted() || encryptWithNewEngine))
{
return encryptPage(pageData,
pageSize,
encryptionBuf,
encryptWithNewEngine);
}
else
{
return pageData;
}
}
} | byte[] function(long pageNumber, byte[] pageData, byte[] encryptionBuf, boolean encryptWithNewEngine) throws StandardException, IOException { if (pageNumber == FIRST_ALLOC_PAGE_NUMBER) { writeHeader(getIdentity(), pageData); if (SanityManager.DEBUG) { if (FormatIdUtil.readFormatIdInteger(pageData) != AllocPage.FORMAT_NUMBER) SanityManager.THROWASSERT( STR + AllocPage.FORMAT_NUMBER + STR + FormatIdUtil.readFormatIdInteger(pageData)); } return pageData; } else { if (encryptionBuf != null && (dataFactory.databaseEncrypted() encryptWithNewEngine)) { return encryptPage(pageData, pageSize, encryptionBuf, encryptWithNewEngine); } else { return pageData; } } } | /**
* Updates the page array with container header if the page is a first
* allocation page and encrypts the page data if the database is encrypted.
*
* @param pageNumber the page number of the page
* @param pageData byte array that has the actual page data.
* @param encryptionBuf buffer that is used to store encrypted version of
* the page, or {@code null} if encryption is to be skipped
* @param encryptWithNewEngine whether to use the new encryption engine for
* encryption (only considered if {@code encryptionBuf != null})
* @return byte array of the the page data as it should be on the disk.
*/ | Updates the page array with container header if the page is a first allocation page and encrypts the page data if the database is encrypted | updatePageArray | {
"repo_name": "apache/derby",
"path": "java/org.apache.derby.engine/org/apache/derby/impl/store/raw/data/RAFContainer.java",
"license": "apache-2.0",
"size": 59993
} | [
"java.io.IOException",
"org.apache.derby.iapi.services.io.FormatIdUtil",
"org.apache.derby.shared.common.error.StandardException",
"org.apache.derby.shared.common.sanity.SanityManager"
] | import java.io.IOException; import org.apache.derby.iapi.services.io.FormatIdUtil; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.shared.common.sanity.SanityManager; | import java.io.*; import org.apache.derby.iapi.services.io.*; import org.apache.derby.shared.common.error.*; import org.apache.derby.shared.common.sanity.*; | [
"java.io",
"org.apache.derby"
] | java.io; org.apache.derby; | 1,418,369 |
private void writeFloatTagPayload(FloatTag tag) throws IOException {
os.writeFloat(tag.getValue());
} | void function(FloatTag tag) throws IOException { os.writeFloat(tag.getValue()); } | /**
* Writes a <code>TAG_Float</code> tag.
* @param tag The tag.
* @throws IOException if an I/O error occurs.
*/ | Writes a <code>TAG_Float</code> tag | writeFloatTagPayload | {
"repo_name": "GoldBigDragon/GoldBigDragonRPG",
"path": "[Spigot 1.8.8]/OtherPlugins/OtherPlugins/jnbt/NBTOutputStream.java",
"license": "gpl-2.0",
"size": 7471
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 580,565 |
Bitmap bitmapFromPath = null;
try {
bitmapFromPath = BitmapFactory.decodeFile(path);
} catch (Exception e) {
// TODO: handle exception
e.printStackTrace();
}
return bitmapFromPath;
} | Bitmap bitmapFromPath = null; try { bitmapFromPath = BitmapFactory.decodeFile(path); } catch (Exception e) { e.printStackTrace(); } return bitmapFromPath; } | /**
* Decodes the Bitmap from 'path' and returns it
* @param path image path
* @return the Bitmap from 'path'
*/ | Decodes the Bitmap from 'path' and returns it | getImage | {
"repo_name": "lidox/reaction-test",
"path": "ReactionTest/app/src/main/java/com/artursworld/reactiontest/model/util/TinyDB.java",
"license": "mit",
"size": 17323
} | [
"android.graphics.Bitmap",
"android.graphics.BitmapFactory"
] | import android.graphics.Bitmap; import android.graphics.BitmapFactory; | import android.graphics.*; | [
"android.graphics"
] | android.graphics; | 2,529,301 |
public void setUserLanguageCode(String code) {
user.setLanguage(new Locale(code == null ? null : code.trim()));
}
| void function(String code) { user.setLanguage(new Locale(code == null ? null : code.trim())); } | /**
* Sets the language code.
*
* @param code
* the new language code
*/ | Sets the language code | setUserLanguageCode | {
"repo_name": "Communote/communote-server",
"path": "communote/webapp/src/main/java/com/communote/server/web/fe/installer/forms/InstallerForm.java",
"license": "apache-2.0",
"size": 13925
} | [
"java.util.Locale"
] | import java.util.Locale; | import java.util.*; | [
"java.util"
] | java.util; | 1,323,232 |
public void setCreated(final DateTime created) {
this.created = created;
} | void function(final DateTime created) { this.created = created; } | /**
* For internal use only.
*
* @param created DateTime of the creation of the Offer
*/ | For internal use only | setCreated | {
"repo_name": "IWSDevelopers/iws",
"path": "iws-api/src/main/java/net/iaeste/iws/api/dtos/exchange/OfferGroup.java",
"license": "apache-2.0",
"size": 5689
} | [
"net.iaeste.iws.api.util.DateTime"
] | import net.iaeste.iws.api.util.DateTime; | import net.iaeste.iws.api.util.*; | [
"net.iaeste.iws"
] | net.iaeste.iws; | 1,002,931 |
Stack<Command> commandsToExecute = MasterController.myParser.parseInput(input);
if (!commandsToExecute.isEmpty()
&& commandsToExecute.peek().getClassName().equals("commands.ErrorCommand")) {
ErrorCommand error = (ErrorCommand)commandsToExecute.pop();
myPane.showError(error.showError());
return;
}
runCommand(commandsToExecute);
} | Stack<Command> commandsToExecute = MasterController.myParser.parseInput(input); if (!commandsToExecute.isEmpty() && commandsToExecute.peek().getClassName().equals(STR)) { ErrorCommand error = (ErrorCommand)commandsToExecute.pop(); myPane.showError(error.showError()); return; } runCommand(commandsToExecute); } | /**
* Accepts a string from the ViewPanel, parses it using a static parser in
* the MasterController class, and searches the returned collection of
* commands for errors. If an error is found, it is displayed in the view,
* and no commands are executed. Otherwise, all commands are run in
* sequence.
*
* @param input
* : User input string from the ViewPanel.
*/ | Accepts a string from the ViewPanel, parses it using a static parser in the MasterController class, and searches the returned collection of commands for errors. If an error is found, it is displayed in the view, and no commands are executed. Otherwise, all commands are run in sequence | getInput | {
"repo_name": "aj148/slogo",
"path": "src/controller/Controller.java",
"license": "mit",
"size": 2189
} | [
"java.util.Stack"
] | import java.util.Stack; | import java.util.*; | [
"java.util"
] | java.util; | 352,876 |
@FIXVersion(introduced="4.0")
@TagNumRef(tagNum=TagNum.PositionEffect)
public PositionEffect getPositionEffect() {
return positionEffect;
} | @FIXVersion(introduced="4.0") @TagNumRef(tagNum=TagNum.PositionEffect) PositionEffect function() { return positionEffect; } | /**
* Message field getter.
* @return field value
*/ | Message field getter | getPositionEffect | {
"repo_name": "marvisan/HadesFIX",
"path": "Model/src/main/java/net/hades/fix/message/AllocationInstructionMsg.java",
"license": "gpl-3.0",
"size": 122626
} | [
"net.hades.fix.message.anno.FIXVersion",
"net.hades.fix.message.anno.TagNumRef",
"net.hades.fix.message.type.PositionEffect",
"net.hades.fix.message.type.TagNum"
] | import net.hades.fix.message.anno.FIXVersion; import net.hades.fix.message.anno.TagNumRef; import net.hades.fix.message.type.PositionEffect; import net.hades.fix.message.type.TagNum; | import net.hades.fix.message.anno.*; import net.hades.fix.message.type.*; | [
"net.hades.fix"
] | net.hades.fix; | 1,939,885 |
public double[] getValuesEx(int typeCode, int samplesToSkip, int resultSize) {
double[] result = new double[resultSize];
int firstInterval = 0;
int idx = 0;
while (samplesToSkip > 0
&& firstInterval <= intervalIdx
&& intervals[firstInterval].getSampleCount() <= samplesToSkip) {
samplesToSkip -= intervals[firstInterval].getSampleCount();
firstInterval++;
}
for (int i=firstInterval; i <= intervalIdx; i++) {
idx += intervals[i].fill(result, idx, typeCode, samplesToSkip);
samplesToSkip = 0;
}
if (currentCount != 0) {
idx += BitInterval.create(currentStartBits, currentInterval, currentCount).fill(result, idx, typeCode, samplesToSkip);
}
// assert
if (idx != resultSize) {
throw new InternalGemFireException(LocalizedStrings.StatArchiveReader_GETVALUESEX_DIDNT_FILL_THE_LAST_0_ENTRIES_OF_ITS_RESULT.toLocalizedString(Integer.valueOf(resultSize-idx)));
}
return result;
} | double[] function(int typeCode, int samplesToSkip, int resultSize) { double[] result = new double[resultSize]; int firstInterval = 0; int idx = 0; while (samplesToSkip > 0 && firstInterval <= intervalIdx && intervals[firstInterval].getSampleCount() <= samplesToSkip) { samplesToSkip -= intervals[firstInterval].getSampleCount(); firstInterval++; } for (int i=firstInterval; i <= intervalIdx; i++) { idx += intervals[i].fill(result, idx, typeCode, samplesToSkip); samplesToSkip = 0; } if (currentCount != 0) { idx += BitInterval.create(currentStartBits, currentInterval, currentCount).fill(result, idx, typeCode, samplesToSkip); } if (idx != resultSize) { throw new InternalGemFireException(LocalizedStrings.StatArchiveReader_GETVALUESEX_DIDNT_FILL_THE_LAST_0_ENTRIES_OF_ITS_RESULT.toLocalizedString(Integer.valueOf(resultSize-idx))); } return result; } | /**
* Gets the first "resultSize" values of this series
* skipping over the first "samplesToSkip" ones.
* The first value in a series is at index 0.
* The maximum result size can be obtained by calling "getSize()".
*/ | Gets the first "resultSize" values of this series skipping over the first "samplesToSkip" ones. The first value in a series is at index 0. The maximum result size can be obtained by calling "getSize()" | getValuesEx | {
"repo_name": "ameybarve15/incubator-geode",
"path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/StatArchiveReader.java",
"license": "apache-2.0",
"size": 110157
} | [
"com.gemstone.gemfire.InternalGemFireException",
"com.gemstone.gemfire.internal.i18n.LocalizedStrings"
] | import com.gemstone.gemfire.InternalGemFireException; import com.gemstone.gemfire.internal.i18n.LocalizedStrings; | import com.gemstone.gemfire.*; import com.gemstone.gemfire.internal.i18n.*; | [
"com.gemstone.gemfire"
] | com.gemstone.gemfire; | 2,721,453 |
public void loadSettings() {
jtfSettingsHost.setText(preferences.get("db_host", DefaultDbParams.HOST.toString()));
jtfSettingsPort.setText(preferences.get("db_port", DefaultDbParams.PORT.toString()));
jtfSettingsDatabase.setText(preferences.get("db_database", DefaultDbParams.DATABASE.toString()));
jtfSettingsUsername.setText(preferences.get("db_username", DefaultDbParams.USERNAME.toString()));
jpfSettingsPassword.setText(preferences.get("db_password", DefaultDbParams.PASSWORD.toString()));
} | void function() { jtfSettingsHost.setText(preferences.get(STR, DefaultDbParams.HOST.toString())); jtfSettingsPort.setText(preferences.get(STR, DefaultDbParams.PORT.toString())); jtfSettingsDatabase.setText(preferences.get(STR, DefaultDbParams.DATABASE.toString())); jtfSettingsUsername.setText(preferences.get(STR, DefaultDbParams.USERNAME.toString())); jpfSettingsPassword.setText(preferences.get(STR, DefaultDbParams.PASSWORD.toString())); } | /**
* Load values from properties and set them into setting fields
*/ | Load values from properties and set them into setting fields | loadSettings | {
"repo_name": "mefi/JKuuza",
"path": "src/main/java/com/github/mefi/jkuuza/gui/AppView.java",
"license": "apache-2.0",
"size": 128106
} | [
"com.github.mefi.jkuuza.app.db.DefaultDbParams"
] | import com.github.mefi.jkuuza.app.db.DefaultDbParams; | import com.github.mefi.jkuuza.app.db.*; | [
"com.github.mefi"
] | com.github.mefi; | 250,759 |
@Test
public void testResetOffsetsWhenOffByOne() {
// Kafka namespace setup
this.topicName = "testResetOffsetsWhenOffByOne" + System.currentTimeMillis();
final int numberOfPartitions = 2;
final int numberOfMsgsOnPartition0 = 0;
final int numberOfMsgsOnPartition1 = 4;
// Define our namespace/partitions
final ConsumerPartition partition0 = new ConsumerPartition(topicName, 0);
final TopicPartition topicPartition0 = new TopicPartition(partition0.namespace(), partition0.partition());
final ConsumerPartition partition1 = new ConsumerPartition(topicName, 1);
final TopicPartition topicPartition1 = new TopicPartition(partition1.namespace(), partition1.partition());
// Create our multi-partition namespace.
getKafkaTestUtils().createTopic(topicName, numberOfPartitions, (short) 1);
// Produce messages into partition1
produceRecords(numberOfMsgsOnPartition1, partition1.partition());
// Setup our config set to reset to none
// We should handle this internally now.
final Map<String, Object> config = getDefaultConfig(topicName);
// Create our Persistence Manager
final PersistenceAdapter persistenceAdapter = new InMemoryPersistenceAdapter();
persistenceAdapter.open(new HashMap<>());
// Move our persisted state to the end of the log, this is where the consumer will begin consuming from
persistenceAdapter.persistConsumerState("MyConsumerId", 1, numberOfMsgsOnPartition1);
// Create our consumer
final Consumer consumer = new Consumer();
consumer.open(config, getDefaultVSpoutId(), getDefaultConsumerCohortDefinition(), persistenceAdapter, new LogRecorder(), null);
// We are at the end of the log, so this should yield NULL every time, there's nothing after our offset
final Record record1 = consumer.nextRecord();
assertNull(record1, "Consumer should not find a record");
assertEquals(
numberOfMsgsOnPartition1,
consumer.getKafkaConsumer().position(topicPartition1),
"Kafka's position should not match the total number of messages on the partition since we are at the end of it"
);
// Seek the consumer past the end of the log, this should create an OutOfRangeException
consumer.getKafkaConsumer().seek(
topicPartition1,
numberOfMsgsOnPartition1 + 1
);
assertEquals(
numberOfMsgsOnPartition1 + 1,
consumer.getKafkaConsumer().position(topicPartition1),
"Seek call on Kafka should be past the end of our messages"
);
// Now attempt to consume a message, the pointer for kafka is past the end of the log so this is going to
// generate an exception which we will catch, and if everything is working correctly we will reset it to the last
// valid offset that we processed
consumer.nextRecord();
assertEquals(
numberOfMsgsOnPartition1,
consumer.getKafkaConsumer().position(topicPartition1),
"Seek call on Kafka should have been reset to our last message"
);
// Clean up
consumer.close();
} | void function() { this.topicName = STR + System.currentTimeMillis(); final int numberOfPartitions = 2; final int numberOfMsgsOnPartition0 = 0; final int numberOfMsgsOnPartition1 = 4; final ConsumerPartition partition0 = new ConsumerPartition(topicName, 0); final TopicPartition topicPartition0 = new TopicPartition(partition0.namespace(), partition0.partition()); final ConsumerPartition partition1 = new ConsumerPartition(topicName, 1); final TopicPartition topicPartition1 = new TopicPartition(partition1.namespace(), partition1.partition()); getKafkaTestUtils().createTopic(topicName, numberOfPartitions, (short) 1); produceRecords(numberOfMsgsOnPartition1, partition1.partition()); final Map<String, Object> config = getDefaultConfig(topicName); final PersistenceAdapter persistenceAdapter = new InMemoryPersistenceAdapter(); persistenceAdapter.open(new HashMap<>()); persistenceAdapter.persistConsumerState(STR, 1, numberOfMsgsOnPartition1); final Consumer consumer = new Consumer(); consumer.open(config, getDefaultVSpoutId(), getDefaultConsumerCohortDefinition(), persistenceAdapter, new LogRecorder(), null); final Record record1 = consumer.nextRecord(); assertNull(record1, STR); assertEquals( numberOfMsgsOnPartition1, consumer.getKafkaConsumer().position(topicPartition1), STR ); consumer.getKafkaConsumer().seek( topicPartition1, numberOfMsgsOnPartition1 + 1 ); assertEquals( numberOfMsgsOnPartition1 + 1, consumer.getKafkaConsumer().position(topicPartition1), STR ); consumer.nextRecord(); assertEquals( numberOfMsgsOnPartition1, consumer.getKafkaConsumer().position(topicPartition1), STR ); consumer.close(); } | /**
* This is a test for a weird edge case we hit in production where the consumer seeks past where we are supposed to
* be, and so we move the pointer back to where we think is valid, rather than resetting to the head of the log.
*/ | This is a test for a weird edge case we hit in production where the consumer seeks past where we are supposed to be, and so we move the pointer back to where we think is valid, rather than resetting to the head of the log | testResetOffsetsWhenOffByOne | {
"repo_name": "salesforce/storm-dynamic-spout",
"path": "src/test/java/com/salesforce/storm/spout/dynamic/kafka/ConsumerTest.java",
"license": "bsd-3-clause",
"size": 123608
} | [
"com.salesforce.storm.spout.dynamic.ConsumerPartition",
"com.salesforce.storm.spout.dynamic.consumer.Record",
"com.salesforce.storm.spout.dynamic.metrics.LogRecorder",
"com.salesforce.storm.spout.dynamic.persistence.InMemoryPersistenceAdapter",
"com.salesforce.storm.spout.dynamic.persistence.PersistenceAdapter",
"java.util.HashMap",
"java.util.Map",
"org.apache.kafka.common.TopicPartition",
"org.junit.jupiter.api.Assertions"
] | import com.salesforce.storm.spout.dynamic.ConsumerPartition; import com.salesforce.storm.spout.dynamic.consumer.Record; import com.salesforce.storm.spout.dynamic.metrics.LogRecorder; import com.salesforce.storm.spout.dynamic.persistence.InMemoryPersistenceAdapter; import com.salesforce.storm.spout.dynamic.persistence.PersistenceAdapter; import java.util.HashMap; import java.util.Map; import org.apache.kafka.common.TopicPartition; import org.junit.jupiter.api.Assertions; | import com.salesforce.storm.spout.dynamic.*; import com.salesforce.storm.spout.dynamic.consumer.*; import com.salesforce.storm.spout.dynamic.metrics.*; import com.salesforce.storm.spout.dynamic.persistence.*; import java.util.*; import org.apache.kafka.common.*; import org.junit.jupiter.api.*; | [
"com.salesforce.storm",
"java.util",
"org.apache.kafka",
"org.junit.jupiter"
] | com.salesforce.storm; java.util; org.apache.kafka; org.junit.jupiter; | 1,745,101 |
/**
 * Returns the deepest account level (max {@code ide_cnncu}) defined in the
 * active chart of accounts for the current company.
 *
 * @return the highest level number found, or -1 when the query yields no
 *         usable value (null result, empty result set, NULL aggregate, or a
 *         non-numeric value)
 */
public int getUltimoNivelCuentas() {
    // NOTE(review): the statement is assembled by string concatenation; if
    // "ide_empr" or the active chart id can carry user-controlled text this
    // is open to SQL injection - prefer a parameterized query.
    List lis_nivel_max = utilitario.getConexion().consultar("select max (ide_cnncu) from con_det_plan_cuen dpc where ide_empr=" + utilitario.getVariable("ide_empr") + " and ide_cncpc=" + getPlandeCuentasActivo());
    // Guard against an empty list and a NULL aggregate (MAX over zero rows
    // returns one NULL cell); previously either case threw an unchecked
    // exception instead of returning the documented -1 sentinel.
    if (lis_nivel_max != null && !lis_nivel_max.isEmpty() && lis_nivel_max.get(0) != null) {
        try {
            return Integer.parseInt(lis_nivel_max.get(0).toString());
        } catch (NumberFormatException e) {
            // Non-numeric aggregate value: fall through to the sentinel.
            return -1;
        }
    }
    return -1;
}
| int function() { List lis_nivel_max = utilitario.getConexion().consultar(STR + utilitario.getVariable(STR) + STR + getPlandeCuentasActivo()); if (lis_nivel_max != null) { return Integer.parseInt(lis_nivel_max.get(0).toString()); } else { return -1; } } | /**
* Retorna el último nivel del plan de cuentas
*
* @return
*/ | Retorna el ultimo nivel del plan de cuentas | getUltimoNivelCuentas | {
"repo_name": "diego10j/jye",
"path": "jye/jye-ejb/src/java/servicios/contabilidad/ServicioContabilidadGeneral.java",
"license": "gpl-2.0",
"size": 23619
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,681,981 |
private static Pref cacheDaisReadDetailWires = Pref.makeBooleanPref("DaisReadDetailWires", IOTool.tool.prefs, true);
public static boolean isDaisReadDetailWires() { return cacheDaisReadDetailWires.getBoolean(); } | private static Pref cacheDaisReadDetailWires = Pref.makeBooleanPref(STR, IOTool.tool.prefs, true); public static boolean isDaisReadDetailWires() { return cacheDaisReadDetailWires.getBoolean(); } | /**
* Method to tell whether Dais Input reads detail wires, by default.
* @return true if Dais Input reads detail wires, by default.
*/ | Method to tell whether Dais Input places cell instances, by default | isFactoryDaisReadCellInstances | {
"repo_name": "imr/Electric8",
"path": "com/sun/electric/tool/io/IOTool.java",
"license": "gpl-3.0",
"size": 68905
} | [
"com.sun.electric.database.text.Pref"
] | import com.sun.electric.database.text.Pref; | import com.sun.electric.database.text.*; | [
"com.sun.electric"
] | com.sun.electric; | 259,645 |
public void evaluate(Population population) throws OperatorException; | void function(Population population) throws OperatorException; | /**
* This method evaluates the individuals of a population and assigns them their performance vectors
* @param population the population to evaluate
* @throws OperatorException
*/ | This method evaluates the individuals of a population and assigns them their performance vectors | evaluate | {
"repo_name": "rapidminer/rapidminer-5",
"path": "src/com/rapidminer/operator/features/PopulationEvaluator.java",
"license": "agpl-3.0",
"size": 1402
} | [
"com.rapidminer.operator.OperatorException"
] | import com.rapidminer.operator.OperatorException; | import com.rapidminer.operator.*; | [
"com.rapidminer.operator"
] | com.rapidminer.operator; | 1,923,061 |
protected IJavaElement[] getParentElements() {
return this.parentElements;
} | IJavaElement[] function() { return this.parentElements; } | /**
* Returns the parent elements to which this operation applies,
* or <code>null</code> if not applicable.
*/ | Returns the parent elements to which this operation applies, or <code>null</code> if not applicable | getParentElements | {
"repo_name": "gazarenkov/che-sketch",
"path": "plugins/plugin-java/che-plugin-java-ext-jdt/org-eclipse-jdt-ui/src/main/java/org/eclipse/jdt/internal/core/JavaModelOperation.java",
"license": "epl-1.0",
"size": 35774
} | [
"org.eclipse.jdt.core.IJavaElement"
] | import org.eclipse.jdt.core.IJavaElement; | import org.eclipse.jdt.core.*; | [
"org.eclipse.jdt"
] | org.eclipse.jdt; | 1,551,053 |
private static ByteBuffer findApkSignatureSchemeV2Block(ByteBuffer apkSigningBlock)
throws SignatureNotFoundException {
checkByteOrderLittleEndian(apkSigningBlock);
// FORMAT:
// OFFSET DATA TYPE DESCRIPTION
// * @+0 bytes uint64: size in bytes (excluding this field)
// * @+8 bytes pairs
// * @-24 bytes uint64: size in bytes (same as the one above)
// * @-16 bytes uint128: magic
ByteBuffer pairs = sliceFromTo(apkSigningBlock, 8, apkSigningBlock.capacity() - 24);
int entryCount = 0;
while (pairs.hasRemaining()) {
entryCount++;
if (pairs.remaining() < 8) {
throw new SignatureNotFoundException(
"Insufficient data to read size of APK Signing Block entry #" + entryCount);
}
long lenLong = pairs.getLong();
if ((lenLong < 4) || (lenLong > Integer.MAX_VALUE)) {
throw new SignatureNotFoundException(
"APK Signing Block entry #" + entryCount
+ " size out of range: " + lenLong);
}
int len = (int) lenLong;
int nextEntryPos = pairs.position() + len;
if (len > pairs.remaining()) {
throw new SignatureNotFoundException(
"APK Signing Block entry #" + entryCount + " size out of range: " + len
+ ", available: " + pairs.remaining());
}
int id = pairs.getInt();
if (id == APK_SIGNATURE_SCHEME_V2_BLOCK_ID) {
return getByteBuffer(pairs, len - 4);
}
pairs.position(nextEntryPos);
}
throw new SignatureNotFoundException(
"No APK Signature Scheme v2 block in APK Signing Block");
} | static ByteBuffer function(ByteBuffer apkSigningBlock) throws SignatureNotFoundException { checkByteOrderLittleEndian(apkSigningBlock); ByteBuffer pairs = sliceFromTo(apkSigningBlock, 8, apkSigningBlock.capacity() - 24); int entryCount = 0; while (pairs.hasRemaining()) { entryCount++; if (pairs.remaining() < 8) { throw new SignatureNotFoundException( STR + entryCount); } long lenLong = pairs.getLong(); if ((lenLong < 4) (lenLong > Integer.MAX_VALUE)) { throw new SignatureNotFoundException( STR + entryCount + STR + lenLong); } int len = (int) lenLong; int nextEntryPos = pairs.position() + len; if (len > pairs.remaining()) { throw new SignatureNotFoundException( STR + entryCount + STR + len + STR + pairs.remaining()); } int id = pairs.getInt(); if (id == APK_SIGNATURE_SCHEME_V2_BLOCK_ID) { return getByteBuffer(pairs, len - 4); } pairs.position(nextEntryPos); } throw new SignatureNotFoundException( STR); } | /**
* get the v2 signature scheme block from the apk signing block
*
* @param apkSigningBlock
* @return
* @throws SignatureNotFoundException
*/ | get the v2 schema block from apk signing block | findApkSignatureSchemeV2Block | {
"repo_name": "wang-qian/ChannelPackage",
"path": "src/com/leon/channel/common/verify/ApkSignatureSchemeV2Verifier.java",
"license": "apache-2.0",
"size": 56414
} | [
"java.nio.ByteBuffer"
] | import java.nio.ByteBuffer; | import java.nio.*; | [
"java.nio"
] | java.nio; | 2,342,768 |
/**
 * Determines whether the given partition assignment is balanced.
 *
 * An assignment is balanced when the minimum and maximum number of partitions
 * assigned to any consumer differ by at most one, or when no unassigned-to-it
 * partition could be moved to a less-loaded consumer without worsening balance.
 *
 * @param currentAssignment the assignment whose balance needs to be checked
 * @param sortedCurrentSubscriptions consumers sorted ascending by how many
 *        partitions are already assigned to them
 * @param allSubscriptions mapping of each consumer to all topics it may be
 *        assigned partitions from
 * @param partitionsPerTopic number of partitions for each subscribed topic
 * @param totalPartitionCount total partition count to be assigned
 * @return true if the given assignment is balanced; false otherwise
 */
private boolean isBalanced(Map<String, List<TopicPartition>> currentAssignment,
                           TreeSet<String> sortedCurrentSubscriptions,
                           Map<String, List<String>> allSubscriptions,
                           Map<String, Integer> partitionsPerTopic,
                           int totalPartitionCount) {
    int min = currentAssignment.get(sortedCurrentSubscriptions.first()).size();
    int max = currentAssignment.get(sortedCurrentSubscriptions.last()).size();
    if (min >= max - 1)
        // if minimum and maximum numbers of partitions assigned to consumers differ by at most one return true
        return true;
    // create a mapping from partitions to the consumer assigned to them
    final Map<TopicPartition, String> allPartitions = new HashMap<>();
    Set<Entry<String, List<TopicPartition>>> assignments = currentAssignment.entrySet();
    for (Map.Entry<String, List<TopicPartition>> entry: assignments) {
        List<TopicPartition> topicPartitions = entry.getValue();
        for (TopicPartition topicPartition: topicPartitions) {
            // a partition owned by two consumers indicates a corrupted assignment
            if (allPartitions.containsKey(topicPartition))
                log.error("{} is assigned to more than one consumer.", topicPartition);
            allPartitions.put(topicPartition, entry.getKey());
        }
    }
    // For each consumer that could still take more partitions, verify that none
    // of the partitions it is subscribed to but was not assigned could be moved
    // to it (moving one would mean the current assignment is not balanced).
    for (String consumer: sortedCurrentSubscriptions) {
        List<TopicPartition> consumerPartitions = currentAssignment.get(consumer);
        int consumerPartitionCount = consumerPartitions.size();
        // skip if this consumer already has all the topic partitions it can get
        List<String> allSubscribedTopics = allSubscriptions.get(consumer);
        int maxAssignmentSize = getMaxAssignmentSize(totalPartitionCount, allSubscribedTopics, partitionsPerTopic);
        if (consumerPartitionCount == maxAssignmentSize)
            continue;
        // otherwise make sure it cannot get any more
        for (String topic: allSubscribedTopics) {
            int partitionCount = partitionsPerTopic.get(topic);
            for (int i = 0; i < partitionCount; i++) {
                TopicPartition topicPartition = new TopicPartition(topic, i);
                if (!currentAssignment.get(consumer).contains(topicPartition)) {
                    String otherConsumer = allPartitions.get(topicPartition);
                    int otherConsumerPartitionCount = currentAssignment.get(otherConsumer).size();
                    // the partition's current owner holds more partitions than
                    // this consumer, so moving it here would improve balance
                    if (consumerPartitionCount < otherConsumerPartitionCount) {
                        log.debug("{} can be moved from consumer {} to consumer {} for a more balanced assignment.",
                            topicPartition, otherConsumer, consumer);
                        return false;
                    }
                }
            }
        }
    }
    return true;
}
/**
* get the maximum assigned partition size of the {@code allSubscribedTopics} | boolean function(Map<String, List<TopicPartition>> currentAssignment, TreeSet<String> sortedCurrentSubscriptions, Map<String, List<String>> allSubscriptions, Map<String, Integer> partitionsPerTopic, int totalPartitionCount) { int min = currentAssignment.get(sortedCurrentSubscriptions.first()).size(); int max = currentAssignment.get(sortedCurrentSubscriptions.last()).size(); if (min >= max - 1) return true; final Map<TopicPartition, String> allPartitions = new HashMap<>(); Set<Entry<String, List<TopicPartition>>> assignments = currentAssignment.entrySet(); for (Map.Entry<String, List<TopicPartition>> entry: assignments) { List<TopicPartition> topicPartitions = entry.getValue(); for (TopicPartition topicPartition: topicPartitions) { if (allPartitions.containsKey(topicPartition)) log.error(STR, topicPartition); allPartitions.put(topicPartition, entry.getKey()); } } for (String consumer: sortedCurrentSubscriptions) { List<TopicPartition> consumerPartitions = currentAssignment.get(consumer); int consumerPartitionCount = consumerPartitions.size(); List<String> allSubscribedTopics = allSubscriptions.get(consumer); int maxAssignmentSize = getMaxAssignmentSize(totalPartitionCount, allSubscribedTopics, partitionsPerTopic); if (consumerPartitionCount == maxAssignmentSize) continue; for (String topic: allSubscribedTopics) { int partitionCount = partitionsPerTopic.get(topic); for (int i = 0; i < partitionCount; i++) { TopicPartition topicPartition = new TopicPartition(topic, i); if (!currentAssignment.get(consumer).contains(topicPartition)) { String otherConsumer = allPartitions.get(topicPartition); int otherConsumerPartitionCount = currentAssignment.get(otherConsumer).size(); if (consumerPartitionCount < otherConsumerPartitionCount) { log.debug(STR, topicPartition, otherConsumer, consumer); return false; } } } } } return true; } /** * get the maximum assigned partition size of the {@code 
allSubscribedTopics} | /**
* determine if the current assignment is a balanced one
*
* @param currentAssignment: the assignment whose balance needs to be checked
* @param sortedCurrentSubscriptions: an ascending sorted set of consumers based on how many topic partitions are already assigned to them
* @param allSubscriptions: a mapping of all consumers to all potential topics that can be assigned to them
* @param partitionsPerTopic: The number of partitions for each subscribed topic
* @param totalPartitionCount total partition count to be assigned
* @return true if the given assignment is balanced; false otherwise
*/ | determine if the current assignment is a balanced one | isBalanced | {
"repo_name": "TiVo/kafka",
"path": "clients/src/main/java/org/apache/kafka/clients/consumer/internals/AbstractStickyAssignor.java",
"license": "apache-2.0",
"size": 69888
} | [
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"java.util.Set",
"java.util.TreeSet",
"org.apache.kafka.common.TopicPartition"
] | import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import org.apache.kafka.common.TopicPartition; | import java.util.*; import org.apache.kafka.common.*; | [
"java.util",
"org.apache.kafka"
] | java.util; org.apache.kafka; | 1,234,377 |
private void initialize()
{
if( selection != null && selection.isEmpty() == false && selection instanceof IStructuredSelection )
{
IStructuredSelection ssel = (IStructuredSelection) selection;
if( ssel.size() > 1 )
return;
Object obj = ssel.getFirstElement();
if( obj instanceof TreeNode )
{
@SuppressWarnings( { "unchecked" } )
IResource selectedResource = ( (TreeNode<IResource>) obj ).getValue();
if( selectedResource instanceof IResource )
{
IContainer container;
if( selectedResource instanceof IContainer )
{
container = (IContainer) selectedResource;
}
else
{
container = selectedResource.getParent();
}
containerText.setText( container.getFullPath().toString() );
}
}
else
{
if( Logger.isDebugEnable() )
{
Logger.logDebug( getClass().getSimpleName(), "unknown selection type", obj.getClass().toString() );
}
}
}
fileText.setText( SackConstant.MESSAGE_NEW_TTCN_FILE_NAME );
} | void function() { if( selection != null && selection.isEmpty() == false && selection instanceof IStructuredSelection ) { IStructuredSelection ssel = (IStructuredSelection) selection; if( ssel.size() > 1 ) return; Object obj = ssel.getFirstElement(); if( obj instanceof TreeNode ) { @SuppressWarnings( { STR } ) IResource selectedResource = ( (TreeNode<IResource>) obj ).getValue(); if( selectedResource instanceof IResource ) { IContainer container; if( selectedResource instanceof IContainer ) { container = (IContainer) selectedResource; } else { container = selectedResource.getParent(); } containerText.setText( container.getFullPath().toString() ); } } else { if( Logger.isDebugEnable() ) { Logger.logDebug( getClass().getSimpleName(), STR, obj.getClass().toString() ); } } } fileText.setText( SackConstant.MESSAGE_NEW_TTCN_FILE_NAME ); } | /**
* Tests if the current workbench selection is a suitable container to use.
*/ | Tests if the current workbench selection is a suitable container to use | initialize | {
"repo_name": "YudingZhou/kitt",
"path": "src/org/quantumlabs/kitt/ui/wizards/NewTTCNFileWizardPage.java",
"license": "gpl-2.0",
"size": 7897
} | [
"org.eclipse.core.resources.IContainer",
"org.eclipse.core.resources.IResource",
"org.eclipse.jface.viewers.IStructuredSelection",
"org.quantumlabs.kitt.core.util.SackConstant",
"org.quantumlabs.kitt.core.util.trace.Logger",
"org.quantumlabs.kitt.ui.view.model.TreeNode"
] | import org.eclipse.core.resources.IContainer; import org.eclipse.core.resources.IResource; import org.eclipse.jface.viewers.IStructuredSelection; import org.quantumlabs.kitt.core.util.SackConstant; import org.quantumlabs.kitt.core.util.trace.Logger; import org.quantumlabs.kitt.ui.view.model.TreeNode; | import org.eclipse.core.resources.*; import org.eclipse.jface.viewers.*; import org.quantumlabs.kitt.core.util.*; import org.quantumlabs.kitt.core.util.trace.*; import org.quantumlabs.kitt.ui.view.model.*; | [
"org.eclipse.core",
"org.eclipse.jface",
"org.quantumlabs.kitt"
] | org.eclipse.core; org.eclipse.jface; org.quantumlabs.kitt; | 2,559,258 |
/**
 * Writes the given string to the target file, creating the file first when
 * it does not exist yet.
 *
 * @param file the file to write to
 * @param content the text to store
 * @throws IOException if the file cannot be written
 */
public static void writeToFile(File file, String content) throws IOException {
    byte[] encoded = content.getBytes(CHARSET);
    Files.write(file.toPath(), encoded);
}
/**
* Converts a string to a platform-specific file path
* @param pathWithForwardSlash A String representing a file path but using '/' as the separator
* @return {@code pathWithForwardSlash} but '/' replaced with {@code File.separator} | static void function(File file, String content) throws IOException { Files.write(file.toPath(), content.getBytes(CHARSET)); } /** * Converts a string to a platform-specific file path * @param pathWithForwardSlash A String representing a file path but using '/' as the separator * @return {@code pathWithForwardSlash} but '/' replaced with {@code File.separator} | /**
* Writes given string to a file.
* Will create the file if it does not exist yet.
*/ | Writes given string to a file. Will create the file if it does not exist yet | writeToFile | {
"repo_name": "CS2103JAN2017-F14-B3/main",
"path": "src/main/java/onlythree/imanager/commons/util/FileUtil.java",
"license": "mit",
"size": 2586
} | [
"java.io.File",
"java.io.IOException",
"java.nio.file.Files"
] | import java.io.File; import java.io.IOException; import java.nio.file.Files; | import java.io.*; import java.nio.file.*; | [
"java.io",
"java.nio"
] | java.io; java.nio; | 45,945 |
@Test
public void deleteLargeDirectory() throws IOException {
LargeDirectoryConfig config = prepareLargeDirectoryTest();
mUfs.delete(config.getTopLevelDirectory(), true);
String[] children = config.getChildren();
for (String child : children) {
// Retry for some time to allow list operation eventual consistency for S3 and GCS.
// See http://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html and
// https://cloud.google.com/storage/docs/consistency for more details.
// Note: not using CommonUtils.waitFor here because we intend to sleep with a longer interval.
boolean childDeleted = false;
for (int i = 0; i < 20; i++) {
childDeleted = !mUfs.exists(child);
if (childDeleted) {
break;
}
CommonUtils.sleepMs(500);
}
Assert.assertTrue(childDeleted);
}
} | void function() throws IOException { LargeDirectoryConfig config = prepareLargeDirectoryTest(); mUfs.delete(config.getTopLevelDirectory(), true); String[] children = config.getChildren(); for (String child : children) { boolean childDeleted = false; for (int i = 0; i < 20; i++) { childDeleted = !mUfs.exists(child); if (childDeleted) { break; } CommonUtils.sleepMs(500); } Assert.assertTrue(childDeleted); } } | /**
* Tests that delete removes all files and folders of a large directory.
*/ | Tests if delete deletes all files or folders for a large directory | deleteLargeDirectory | {
"repo_name": "bit-zyl/Alluxio-Nvdimm",
"path": "tests/src/test/java/alluxio/underfs/UnderStorageSystemInterfaceIntegrationTest.java",
"license": "apache-2.0",
"size": 20086
} | [
"java.io.IOException",
"org.junit.Assert"
] | import java.io.IOException; import org.junit.Assert; | import java.io.*; import org.junit.*; | [
"java.io",
"org.junit"
] | java.io; org.junit; | 757,453 |
private ShortCircuitReplicaInfo requestFileDescriptors(DomainPeer peer,
Slot slot) throws IOException {
ShortCircuitCache cache = clientContext.getShortCircuitCache();
final DataOutputStream out =
new DataOutputStream(new BufferedOutputStream(peer.getOutputStream()));
SlotId slotId = slot == null ? null : slot.getSlotId();
new Sender(out).requestShortCircuitFds(block, token, slotId, 1,
failureInjector.getSupportsReceiptVerification());
DataInputStream in = new DataInputStream(peer.getInputStream());
BlockOpResponseProto resp = BlockOpResponseProto.parseFrom(
PBHelper.vintPrefixed(in));
DomainSocket sock = peer.getDomainSocket();
failureInjector.injectRequestFileDescriptorsFailure();
switch (resp.getStatus()) {
case SUCCESS:
byte buf[] = new byte[1];
FileInputStream fis[] = new FileInputStream[2];
sock.recvFileInputStreams(fis, buf, 0, buf.length);
ShortCircuitReplica replica = null;
try {
ExtendedBlockId key =
new ExtendedBlockId(block.getBlockId(), block.getBlockPoolId());
if (buf[0] == USE_RECEIPT_VERIFICATION.getNumber()) {
LOG.trace("Sending receipt verification byte for slot " + slot);
sock.getOutputStream().write(0);
}
replica = new ShortCircuitReplica(key, fis[0], fis[1], cache,
Time.monotonicNow(), slot);
return new ShortCircuitReplicaInfo(replica);
} catch (IOException e) {
// This indicates an error reading from disk, or a format error. Since
// it's not a socket communication problem, we return null rather than
// throwing an exception.
LOG.warn(this + ": error creating ShortCircuitReplica.", e);
return null;
} finally {
if (replica == null) {
IOUtils.cleanup(DFSClient.LOG, fis[0], fis[1]);
}
}
case ERROR_UNSUPPORTED:
if (!resp.hasShortCircuitAccessVersion()) {
LOG.warn("short-circuit read access is disabled for " +
"DataNode " + datanode + ". reason: " + resp.getMessage());
clientContext.getDomainSocketFactory()
.disableShortCircuitForPath(pathInfo.getPath());
} else {
LOG.warn("short-circuit read access for the file " +
fileName + " is disabled for DataNode " + datanode +
". reason: " + resp.getMessage());
}
return null;
case ERROR_ACCESS_TOKEN:
String msg = "access control error while " +
"attempting to set up short-circuit access to " +
fileName + resp.getMessage();
if (LOG.isDebugEnabled()) {
LOG.debug(this + ":" + msg);
}
return new ShortCircuitReplicaInfo(new InvalidToken(msg));
default:
LOG.warn(this + ": unknown response code " + resp.getStatus() +
" while attempting to set up short-circuit access. " +
resp.getMessage());
clientContext.getDomainSocketFactory()
.disableShortCircuitForPath(pathInfo.getPath());
return null;
}
} | ShortCircuitReplicaInfo function(DomainPeer peer, Slot slot) throws IOException { ShortCircuitCache cache = clientContext.getShortCircuitCache(); final DataOutputStream out = new DataOutputStream(new BufferedOutputStream(peer.getOutputStream())); SlotId slotId = slot == null ? null : slot.getSlotId(); new Sender(out).requestShortCircuitFds(block, token, slotId, 1, failureInjector.getSupportsReceiptVerification()); DataInputStream in = new DataInputStream(peer.getInputStream()); BlockOpResponseProto resp = BlockOpResponseProto.parseFrom( PBHelper.vintPrefixed(in)); DomainSocket sock = peer.getDomainSocket(); failureInjector.injectRequestFileDescriptorsFailure(); switch (resp.getStatus()) { case SUCCESS: byte buf[] = new byte[1]; FileInputStream fis[] = new FileInputStream[2]; sock.recvFileInputStreams(fis, buf, 0, buf.length); ShortCircuitReplica replica = null; try { ExtendedBlockId key = new ExtendedBlockId(block.getBlockId(), block.getBlockPoolId()); if (buf[0] == USE_RECEIPT_VERIFICATION.getNumber()) { LOG.trace(STR + slot); sock.getOutputStream().write(0); } replica = new ShortCircuitReplica(key, fis[0], fis[1], cache, Time.monotonicNow(), slot); return new ShortCircuitReplicaInfo(replica); } catch (IOException e) { LOG.warn(this + STR, e); return null; } finally { if (replica == null) { IOUtils.cleanup(DFSClient.LOG, fis[0], fis[1]); } } case ERROR_UNSUPPORTED: if (!resp.hasShortCircuitAccessVersion()) { LOG.warn(STR + STR + datanode + STR + resp.getMessage()); clientContext.getDomainSocketFactory() .disableShortCircuitForPath(pathInfo.getPath()); } else { LOG.warn(STR + fileName + STR + datanode + STR + resp.getMessage()); } return null; case ERROR_ACCESS_TOKEN: String msg = STR + STR + fileName + resp.getMessage(); if (LOG.isDebugEnabled()) { LOG.debug(this + ":" + msg); } return new ShortCircuitReplicaInfo(new InvalidToken(msg)); default: LOG.warn(this + STR + resp.getStatus() + STR + resp.getMessage()); clientContext.getDomainSocketFactory() 
.disableShortCircuitForPath(pathInfo.getPath()); return null; } } | /**
* Request file descriptors from a DomainPeer.
*
* @param peer The peer to use for communication.
* @param slot If non-null, the shared memory slot to associate with the
* new ShortCircuitReplica.
*
* @return A ShortCircuitReplica object if we could communicate with the
* datanode; null, otherwise.
* @throws IOException If we encountered an I/O exception while communicating
* with the datanode.
*/ | Request file descriptors from a DomainPeer | requestFileDescriptors | {
"repo_name": "intel-hadoop/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderFactory.java",
"license": "apache-2.0",
"size": 30653
} | [
"java.io.BufferedOutputStream",
"java.io.DataInputStream",
"java.io.DataOutputStream",
"java.io.FileInputStream",
"java.io.IOException",
"org.apache.hadoop.hdfs.net.DomainPeer",
"org.apache.hadoop.hdfs.protocol.datatransfer.Sender",
"org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos",
"org.apache.hadoop.hdfs.protocolPB.PBHelper",
"org.apache.hadoop.hdfs.shortcircuit.ShortCircuitCache",
"org.apache.hadoop.hdfs.shortcircuit.ShortCircuitReplica",
"org.apache.hadoop.hdfs.shortcircuit.ShortCircuitReplicaInfo",
"org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm",
"org.apache.hadoop.io.IOUtils",
"org.apache.hadoop.net.unix.DomainSocket",
"org.apache.hadoop.security.token.SecretManager",
"org.apache.hadoop.util.Time"
] | import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.FileInputStream; import java.io.IOException; import org.apache.hadoop.hdfs.net.DomainPeer; import org.apache.hadoop.hdfs.protocol.datatransfer.Sender; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos; import org.apache.hadoop.hdfs.protocolPB.PBHelper; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitCache; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitReplica; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitReplicaInfo; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.unix.DomainSocket; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.util.Time; | import java.io.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.hdfs.net.*; import org.apache.hadoop.hdfs.protocol.datatransfer.*; import org.apache.hadoop.hdfs.protocol.proto.*; import org.apache.hadoop.hdfs.shortcircuit.*; import org.apache.hadoop.io.*; import org.apache.hadoop.net.unix.*; import org.apache.hadoop.security.token.*; import org.apache.hadoop.util.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 2,244,502 |
protected List<String> getSkewedValueFromASTNode(ASTNode ast) {
List<String> colList = new ArrayList<String>();
int numCh = ast.getChildCount();
for (int i = 0; i < numCh; i++) {
ASTNode child = (ASTNode) ast.getChild(i);
colList.add(stripQuotes(child.getText()).toLowerCase());
}
return colList;
} | List<String> function(ASTNode ast) { List<String> colList = new ArrayList<String>(); int numCh = ast.getChildCount(); for (int i = 0; i < numCh; i++) { ASTNode child = (ASTNode) ast.getChild(i); colList.add(stripQuotes(child.getText()).toLowerCase()); } return colList; } | /**
* Given a ASTNode, return list of values.
*
* use case:
* create table xyz list bucketed (col1) with skew (1,2,5)
* AST Node is for (1,2,5)
* @param ast
* @return
*/ | Given a ASTNode, return list of values. use case: create table xyz list bucketed (col1) with skew (1,2,5) AST Node is for (1,2,5) | getSkewedValueFromASTNode | {
"repo_name": "b-slim/hive",
"path": "ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java",
"license": "apache-2.0",
"size": 91275
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,603,149 |
public String canonicalString()
{
boolean needNormalize =
(hasTimeZone() && getTimeZoneSign() != 0 && hasTime() &&
((hasDay() == hasMonth() && hasDay() == hasYear())));
if (!needNormalize && getFraction()!=null && getFraction().scale() > 0)
{
BigInteger bi = getFraction().unscaledValue();
needNormalize = (bi.mod(TEN).signum() == 0);
}
if (!needNormalize)
return toString();
GDateBuilder cdate = new GDateBuilder(this);
cdate.normalize();
return cdate.toString();
} | String function() { boolean needNormalize = (hasTimeZone() && getTimeZoneSign() != 0 && hasTime() && ((hasDay() == hasMonth() && hasDay() == hasYear()))); if (!needNormalize && getFraction()!=null && getFraction().scale() > 0) { BigInteger bi = getFraction().unscaledValue(); needNormalize = (bi.mod(TEN).signum() == 0); } if (!needNormalize) return toString(); GDateBuilder cdate = new GDateBuilder(this); cdate.normalize(); return cdate.toString(); } | /**
* The canonical string representation. Specific moments or
* times-of-day in a specified timezone are normalized to
* UTC time to produce a canonical string form for them.
* Other recurring time specifications keep their timezone
* information.
*/ | The canonical string representation. Specific moments or times-of-day in a specified timezone are normalized to UTC time to produce a canonical string form for them. Other recurring time specifications keep their timezone information | canonicalString | {
"repo_name": "crow-misia/xmlbeans",
"path": "src/xmlpublic/org/apache/xmlbeans/GDateBuilder.java",
"license": "apache-2.0",
"size": 48444
} | [
"java.math.BigInteger"
] | import java.math.BigInteger; | import java.math.*; | [
"java.math"
] | java.math; | 1,532,142 |
private JsonObject createBase(String name, String label, String type, String path) {
return createBase(name, label, type, path, null, null);
} | JsonObject function(String name, String label, String type, String path) { return createBase(name, label, type, path, null, null); } | /**
* Helper method to create a base element with the given name/label/type/path
*
* @param name the element name
* @param label the element label
* @param type the element type
* @param path the element path
* @return the json object representing the base element
*/ | Helper method to create a base element with the given name/label/type/path | createBase | {
"repo_name": "clinique/openhab2",
"path": "bundles/org.openhab.io.neeo/src/main/java/org/openhab/io/neeo/internal/serialization/NeeoBrainDeviceSerializer.java",
"license": "epl-1.0",
"size": 14623
} | [
"com.google.gson.JsonObject"
] | import com.google.gson.JsonObject; | import com.google.gson.*; | [
"com.google.gson"
] | com.google.gson; | 2,709,809 |
protected void sequence_Add(EObject context, ARI semanticObject) {
genericSequencer.createSequence(context, semanticObject);
}
| void function(EObject context, ARI semanticObject) { genericSequencer.createSequence(context, semanticObject); } | /**
* Constraint:
* (mult+=Mult mult+=Mult*)
*/ | Constraint: (mult+=Mult mult+=Mult*) | sequence_Add | {
"repo_name": "santifa/compilerbau",
"path": "projekt2/Neu/org.xtext.mgpl/src-gen/org/xtext/mgpl/serializer/MgplDSLSemanticSequencer.java",
"license": "bsd-3-clause",
"size": 17791
} | [
"org.eclipse.emf.ecore.EObject"
] | import org.eclipse.emf.ecore.EObject; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 399,062 |
public static byte[] serializeAndEncodeObject(final CipherExecutor cipher,
final Serializable object,
final Object[] parameters) {
val outBytes = serialize(object);
return (byte[]) cipher.encode(outBytes, parameters);
} | static byte[] function(final CipherExecutor cipher, final Serializable object, final Object[] parameters) { val outBytes = serialize(object); return (byte[]) cipher.encode(outBytes, parameters); } | /**
* Serialize and encode object.
*
* @param cipher the cipher
* @param object the object
* @param parameters the parameters
* @return the byte []
* @since 4.2
*/ | Serialize and encode object | serializeAndEncodeObject | {
"repo_name": "GIP-RECIA/cas",
"path": "core/cas-server-core-util-api/src/main/java/org/apereo/cas/util/serialization/SerializationUtils.java",
"license": "apache-2.0",
"size": 5642
} | [
"java.io.Serializable",
"org.apereo.cas.CipherExecutor"
] | import java.io.Serializable; import org.apereo.cas.CipherExecutor; | import java.io.*; import org.apereo.cas.*; | [
"java.io",
"org.apereo.cas"
] | java.io; org.apereo.cas; | 1,811,324 |
public Method getFunction(int index) {
return functions[index];
}
| Method function(int index) { return functions[index]; } | /**
* Get function by index.
* @param index function index
* @return method
*/ | Get function by index | getFunction | {
"repo_name": "robsoncardosoti/flowable-engine",
"path": "modules/flowable-engine-common/src/main/java/org/flowable/engine/common/impl/de/odysseus/el/tree/Bindings.java",
"license": "apache-2.0",
"size": 5411
} | [
"java.lang.reflect.Method"
] | import java.lang.reflect.Method; | import java.lang.reflect.*; | [
"java.lang"
] | java.lang; | 1,210,462 |
@CheckForNull Resource getParent(@Nonnull Resource child); | @CheckForNull Resource getParent(@Nonnull Resource child); | /**
* Returns the parent resource of this resource.
* <p>
* This method is implemented by getting the parent resource path first
* calling the {@link ResourceUtil#getParent(String)} method and then to
* retrieve that resource.
*
* @param child The {@link Resource Resource} whose parent is requested.
* @return The parent resource or {@code null}.
* @throws NullPointerException If <code>child</code> is <code>null</code>.
* @throws org.apache.sling.api.SlingException If any error occurs acquiring
* the parent resource.
* @throws IllegalStateException if this resource resolver has already been
* {@link #close() closed}.
* @since 2.9 (Sling API Bundle 2.10.0)
*/ | Returns the parent resource of this resource. This method is implemented by getting the parent resource path first calling the <code>ResourceUtil#getParent(String)</code> method and then to retrieve that resource | getParent | {
"repo_name": "nleite/sling",
"path": "bundles/api/src/main/java/org/apache/sling/api/resource/ResourceResolver.java",
"license": "apache-2.0",
"size": 37824
} | [
"javax.annotation.CheckForNull",
"javax.annotation.Nonnull"
] | import javax.annotation.CheckForNull; import javax.annotation.Nonnull; | import javax.annotation.*; | [
"javax.annotation"
] | javax.annotation; | 1,214,957 |
public FlowDumpPayload map(FlowEventDump dump) {
FlowDumpPayload result = generatedMap(dump);
result.setForwardCookie(fallbackIfNull(mapCookie(dump.getForwardCookie()), 0L));
result.setReverseCookie(fallbackIfNull(mapCookie(dump.getReverseCookie()), 0L));
return result;
} | FlowDumpPayload function(FlowEventDump dump) { FlowDumpPayload result = generatedMap(dump); result.setForwardCookie(fallbackIfNull(mapCookie(dump.getForwardCookie()), 0L)); result.setReverseCookie(fallbackIfNull(mapCookie(dump.getReverseCookie()), 0L)); return result; } | /**
* Map {@link FlowEventDump} into {@link FlowDumpPayload}.
*/ | Map <code>FlowEventDump</code> into <code>FlowDumpPayload</code> | map | {
"repo_name": "telstra/open-kilda",
"path": "src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/share/mappers/HistoryMapper.java",
"license": "apache-2.0",
"size": 10337
} | [
"org.openkilda.messaging.payload.history.FlowDumpPayload",
"org.openkilda.model.history.FlowEventDump"
] | import org.openkilda.messaging.payload.history.FlowDumpPayload; import org.openkilda.model.history.FlowEventDump; | import org.openkilda.messaging.payload.history.*; import org.openkilda.model.history.*; | [
"org.openkilda.messaging",
"org.openkilda.model"
] | org.openkilda.messaging; org.openkilda.model; | 273,600 |
public LegalEntity getEntity() {
return _entity;
} | LegalEntity function() { return _entity; } | /**
* Gets the entity.
* @return The entity
*/ | Gets the entity | getEntity | {
"repo_name": "McLeodMoores/starling",
"path": "projects/analytics/src/main/java/com/opengamma/analytics/financial/equity/EquityDefinition.java",
"license": "apache-2.0",
"size": 3209
} | [
"com.opengamma.analytics.financial.legalentity.LegalEntity"
] | import com.opengamma.analytics.financial.legalentity.LegalEntity; | import com.opengamma.analytics.financial.legalentity.*; | [
"com.opengamma.analytics"
] | com.opengamma.analytics; | 540,257 |
public void fetch() throws WebApiException {
try {
Map<String, Object> params = new HashMap<String, Object>();
params.put("SteamID", this.steamId64);
JSONObject result = WebApi.getJSONData("IEconItems_" + this.getAppId(), "GetPlayerItems", 1, params);
this.items = new HashMap<Integer, GameItem>();
JSONArray itemsData = result.getJSONArray("items");
for(int i = 0; i < itemsData.length(); i ++) {
JSONObject itemData = itemsData.getJSONObject(i);
if(itemData != null) {
try {
GameItem item = this.getItemClass().getConstructor(this.getClass(), JSONObject.class).newInstance(this, itemData);
this.items.put(item.getBackpackPosition() - 1, item);
} catch(IllegalAccessException e) {
} catch(InstantiationException e) {
} catch(InvocationTargetException e) {
} catch(NoSuchMethodException e) {}
}
}
} catch(JSONException e) {
throw new WebApiException("Could not parse JSON data.", e);
}
this.fetchDate = new Date();
} | void function() throws WebApiException { try { Map<String, Object> params = new HashMap<String, Object>(); params.put(STR, this.steamId64); JSONObject result = WebApi.getJSONData(STR + this.getAppId(), STR, 1, params); this.items = new HashMap<Integer, GameItem>(); JSONArray itemsData = result.getJSONArray("items"); for(int i = 0; i < itemsData.length(); i ++) { JSONObject itemData = itemsData.getJSONObject(i); if(itemData != null) { try { GameItem item = this.getItemClass().getConstructor(this.getClass(), JSONObject.class).newInstance(this, itemData); this.items.put(item.getBackpackPosition() - 1, item); } catch(IllegalAccessException e) { } catch(InstantiationException e) { } catch(InvocationTargetException e) { } catch(NoSuchMethodException e) {} } } } catch(JSONException e) { throw new WebApiException(STR, e); } this.fetchDate = new Date(); } | /**
* Updates the contents of the backpack using Steam Web API
*
* @throws WebApiException on Web API errors
*/ | Updates the contents of the backpack using Steam Web API | fetch | {
"repo_name": "gutomaia/steam-condenser-java",
"path": "src/main/java/com/github/koraktor/steamcondenser/steam/community/GameInventory.java",
"license": "bsd-3-clause",
"size": 9323
} | [
"com.github.koraktor.steamcondenser.exceptions.WebApiException",
"java.lang.reflect.InvocationTargetException",
"java.util.Date",
"java.util.HashMap",
"java.util.Map",
"org.json.JSONArray",
"org.json.JSONException",
"org.json.JSONObject"
] | import com.github.koraktor.steamcondenser.exceptions.WebApiException; import java.lang.reflect.InvocationTargetException; import java.util.Date; import java.util.HashMap; import java.util.Map; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; | import com.github.koraktor.steamcondenser.exceptions.*; import java.lang.reflect.*; import java.util.*; import org.json.*; | [
"com.github.koraktor",
"java.lang",
"java.util",
"org.json"
] | com.github.koraktor; java.lang; java.util; org.json; | 1,393,457 |
@ServiceMethod(returns = ReturnType.SINGLE)
public ExpressRouteGatewayListInner listByResourceGroup(String resourceGroupName) {
return listByResourceGroupAsync(resourceGroupName).block();
} | @ServiceMethod(returns = ReturnType.SINGLE) ExpressRouteGatewayListInner function(String resourceGroupName) { return listByResourceGroupAsync(resourceGroupName).block(); } | /**
* Lists ExpressRoute gateways in a given resource group.
*
* @param resourceGroupName The name of the resource group.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of ExpressRoute gateways.
*/ | Lists ExpressRoute gateways in a given resource group | listByResourceGroup | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/ExpressRouteGatewaysClientImpl.java",
"license": "mit",
"size": 52917
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.network.fluent.models.ExpressRouteGatewayListInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.network.fluent.models.ExpressRouteGatewayListInner; | import com.azure.core.annotation.*; import com.azure.resourcemanager.network.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 1,759,116 |
public static Object extractResultBody(Exchange exchange, ExchangePattern pattern) {
Object answer = null;
if (exchange != null) {
// rethrow if there was an exception during execution
if (exchange.getException() != null) {
throw ObjectHelper.wrapCamelExecutionException(exchange, exchange.getException());
}
// result could have a fault message
if (hasFaultMessage(exchange)) {
Message msg = exchange.hasOut() ? exchange.getOut() : exchange.getIn();
answer = msg.getBody();
return answer;
}
// okay no fault then return the response according to the pattern
// try to honor pattern if provided
boolean notOut = pattern != null && !pattern.isOutCapable();
boolean hasOut = exchange.hasOut();
if (hasOut && !notOut) {
// we have a response in out and the pattern is out capable
answer = exchange.getOut().getBody();
} else if (!hasOut && exchange.getPattern() == ExchangePattern.InOptionalOut) {
// special case where the result is InOptionalOut and with no OUT response
// so we should return null to indicate this fact
answer = null;
} else {
// use IN as the response
answer = exchange.getIn().getBody();
}
}
return answer;
} | static Object function(Exchange exchange, ExchangePattern pattern) { Object answer = null; if (exchange != null) { if (exchange.getException() != null) { throw ObjectHelper.wrapCamelExecutionException(exchange, exchange.getException()); } if (hasFaultMessage(exchange)) { Message msg = exchange.hasOut() ? exchange.getOut() : exchange.getIn(); answer = msg.getBody(); return answer; } boolean notOut = pattern != null && !pattern.isOutCapable(); boolean hasOut = exchange.hasOut(); if (hasOut && !notOut) { answer = exchange.getOut().getBody(); } else if (!hasOut && exchange.getPattern() == ExchangePattern.InOptionalOut) { answer = null; } else { answer = exchange.getIn().getBody(); } } return answer; } | /**
* Extracts the body from the given exchange.
* <p/>
* If the exchange pattern is provided it will try to honor it and retrieve the body
* from either IN or OUT according to the pattern.
*
* @param exchange the exchange
* @param pattern exchange pattern if given, can be <tt>null</tt>
* @return the result body, can be <tt>null</tt>.
* @throws CamelExecutionException is thrown if the processing of the exchange failed
*/ | Extracts the body from the given exchange. If the exchange pattern is provided it will try to honor it and retrieve the body from either IN or OUT according to the pattern | extractResultBody | {
"repo_name": "tlehoux/camel",
"path": "camel-core/src/main/java/org/apache/camel/util/ExchangeHelper.java",
"license": "apache-2.0",
"size": 39454
} | [
"org.apache.camel.Exchange",
"org.apache.camel.ExchangePattern",
"org.apache.camel.Message"
] | import org.apache.camel.Exchange; import org.apache.camel.ExchangePattern; import org.apache.camel.Message; | import org.apache.camel.*; | [
"org.apache.camel"
] | org.apache.camel; | 160,057 |
@Override
public EncodedSeeker createSeeker(CellComparator comparator,
HFileBlockDecodingContext decodingCtx) {
if (comparator instanceof MetaCellComparator) {
throw new IllegalArgumentException(
"DataBlockEncoding.PREFIX_TREE not compatible with hbase:meta " + "table");
}
return new PrefixTreeSeeker(decodingCtx.getHFileContext().isIncludesMvcc());
} | EncodedSeeker function(CellComparator comparator, HFileBlockDecodingContext decodingCtx) { if (comparator instanceof MetaCellComparator) { throw new IllegalArgumentException( STR + "table"); } return new PrefixTreeSeeker(decodingCtx.getHFileContext().isIncludesMvcc()); } | /**
* Is this the correct handling of an illegal comparator? How to prevent that from getting all
* the way to this point.
*/ | Is this the correct handling of an illegal comparator? How to prevent that from getting all the way to this point | createSeeker | {
"repo_name": "SeekerResource/hbase",
"path": "hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java",
"license": "apache-2.0",
"size": 8854
} | [
"org.apache.hadoop.hbase.CellComparator",
"org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext"
] | import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext; | import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.io.encoding.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 209,219 |
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
//processRequest(request, response);
response.setContentType("text/html;charset=UTF-8");
PrintWriter out = response.getWriter();
ServletContext context = getServletContext();
Map<String, String[]> paramMap = request.getParameterMap();
String descType = paramMap.get("desctype")[0];
String objId = paramMap.get("hasID")[0];
ConfigParameters cp;
switch (descType) {
case ResourceConfigParams.objType:
cp = new ResourceConfigParams(context);
break;
case EntityConfigParams.objType:
cp = new EntityConfigParams(context);
break;
case ServiceConfigParams.objType:
cp = new ServiceConfigParams(context);
break;
default:
throw new IllegalArgumentException("Invalid description type: " + descType);
}
Path path = Paths.get(cp.getIndex());
if (Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
WebFormToInstance foi = new WebFormToInstance(cp);
Model mQuery = ModelFactory.createDefaultModel();
mQuery.add(foi.createJenaModel(paramMap));
JenaModelMgmt crmInstance = new JenaModelMgmt(cp, objId);
ObjectDataStructure obsForDiscovery = crmInstance.getConceptsFromInstance(mQuery, null);
System.out.println("concepts are: " + obsForDiscovery.getAllConcepts().toString());
NonLogicMatchmaking nlmQ = new NonLogicMatchmaking(cp);
ArrayList<String> descriptionIDs = nlmQ.matchToQuery(obsForDiscovery); //only one allowed in query
ArrayList<Map> resultLinks = new ArrayList<>();
String jsonResult = "";
for (int i = 0; i < descriptionIDs.size(); i++) {
String descriptionLink = context.getContextPath()
+ RestReqApplication.restletPath
+ RestReqApplication.lookupPrefix
+ cp.getDescTypeLinkSuffix()
+ "/" + descriptionIDs.get(i);
WebFormResultLinks frl = new WebFormResultLinks(descriptionLink);
resultLinks.add(frl.getFormatLinks());
}
jsonResult = new Gson().toJson(resultLinks);
System.out.println(jsonResult);
out.println(jsonResult);
out.close();
} else {
try {
response.sendError(HttpServletResponse.SC_CONFLICT, "Search Index for " + cp.getObjType().toUpperCase() + " has not yet been created");
} finally {
out.close();
}
}
}
| void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType(STR); PrintWriter out = response.getWriter(); ServletContext context = getServletContext(); Map<String, String[]> paramMap = request.getParameterMap(); String descType = paramMap.get(STR)[0]; String objId = paramMap.get("hasID")[0]; ConfigParameters cp; switch (descType) { case ResourceConfigParams.objType: cp = new ResourceConfigParams(context); break; case EntityConfigParams.objType: cp = new EntityConfigParams(context); break; case ServiceConfigParams.objType: cp = new ServiceConfigParams(context); break; default: throw new IllegalArgumentException(STR + descType); } Path path = Paths.get(cp.getIndex()); if (Files.exists(path, LinkOption.NOFOLLOW_LINKS)) { WebFormToInstance foi = new WebFormToInstance(cp); Model mQuery = ModelFactory.createDefaultModel(); mQuery.add(foi.createJenaModel(paramMap)); JenaModelMgmt crmInstance = new JenaModelMgmt(cp, objId); ObjectDataStructure obsForDiscovery = crmInstance.getConceptsFromInstance(mQuery, null); System.out.println(STR + obsForDiscovery.getAllConcepts().toString()); NonLogicMatchmaking nlmQ = new NonLogicMatchmaking(cp); ArrayList<String> descriptionIDs = nlmQ.matchToQuery(obsForDiscovery); ArrayList<Map> resultLinks = new ArrayList<>(); String jsonResult = STR/STRSearch Index for STR has not yet been created"); } finally { out.close(); } } } | /**
* Handles the HTTP <code>POST</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/ | Handles the HTTP <code>POST</code> method | doPost | {
"repo_name": "UniSurreyIoT/fiware-iot-discovery-s2w",
"path": "src/main/java/uk/ac/surrey/ee/iot/s2w/model/iota/servlet/IotaDiscoverWebFormServlet.java",
"license": "agpl-3.0",
"size": 6573
} | [
"com.hp.hpl.jena.rdf.model.Model",
"com.hp.hpl.jena.rdf.model.ModelFactory",
"java.io.IOException",
"java.io.PrintWriter",
"java.nio.file.Files",
"java.nio.file.LinkOption",
"java.nio.file.Path",
"java.nio.file.Paths",
"java.util.ArrayList",
"java.util.Map",
"javax.servlet.ServletContext",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"uk.ac.surrey.ee.iot.s2w.config.ConfigParameters",
"uk.ac.surrey.ee.iot.s2w.config.EntityConfigParams",
"uk.ac.surrey.ee.iot.s2w.config.ResourceConfigParams",
"uk.ac.surrey.ee.iot.s2w.config.ServiceConfigParams",
"uk.ac.surrey.ee.iot.s2w.jena.JenaModelMgmt",
"uk.ac.surrey.ee.iot.s2w.probengine.core.NonLogicMatchmaking",
"uk.ac.surrey.ee.iot.s2w.probengine.core.ObjectDataStructure"
] | import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import java.io.IOException; import java.io.PrintWriter; import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Map; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uk.ac.surrey.ee.iot.s2w.config.ConfigParameters; import uk.ac.surrey.ee.iot.s2w.config.EntityConfigParams; import uk.ac.surrey.ee.iot.s2w.config.ResourceConfigParams; import uk.ac.surrey.ee.iot.s2w.config.ServiceConfigParams; import uk.ac.surrey.ee.iot.s2w.jena.JenaModelMgmt; import uk.ac.surrey.ee.iot.s2w.probengine.core.NonLogicMatchmaking; import uk.ac.surrey.ee.iot.s2w.probengine.core.ObjectDataStructure; | import com.hp.hpl.jena.rdf.model.*; import java.io.*; import java.nio.file.*; import java.util.*; import javax.servlet.*; import javax.servlet.http.*; import uk.ac.surrey.ee.iot.s2w.config.*; import uk.ac.surrey.ee.iot.s2w.jena.*; import uk.ac.surrey.ee.iot.s2w.probengine.core.*; | [
"com.hp.hpl",
"java.io",
"java.nio",
"java.util",
"javax.servlet",
"uk.ac.surrey"
] | com.hp.hpl; java.io; java.nio; java.util; javax.servlet; uk.ac.surrey; | 994,133 |
int no_of_affected_rows;
try{
dbConnection = DatabaseManager.getConnection();
logger.info("Add New Campaign : Database connected");
preparedStatement = dbConnection.prepareStatement(SQL.ADD_NEW_CAMPAIGN);
preparedStatement.setString(1, campaignVO.getCampaignName());
preparedStatement.setInt(2, campaignVO.getContentId());
preparedStatement.setString(3, campaignVO.getCampaignContent());
preparedStatement.setInt(4, CampaignStatus.NASCENT.getValue());
preparedStatement.setInt(5, campaignVO.getGroupId());
preparedStatement.setInt(6, campaignVO.getUserId());
no_of_affected_rows = preparedStatement.executeUpdate();
return no_of_affected_rows;
}catch(SQLException | NamingException e){
logger.fatal("Add New Contact: SQLException "+e.getMessage(), e);
return Constants.ERROR_RESPONSE;
}finally{
try{
closeGracefully();
}catch(SQLException e){
logger.fatal("Add New Contact: SQLException "+e.getMessage(), e);
return Constants.ERROR_RESPONSE;
}
}
} | int no_of_affected_rows; try{ dbConnection = DatabaseManager.getConnection(); logger.info(STR); preparedStatement = dbConnection.prepareStatement(SQL.ADD_NEW_CAMPAIGN); preparedStatement.setString(1, campaignVO.getCampaignName()); preparedStatement.setInt(2, campaignVO.getContentId()); preparedStatement.setString(3, campaignVO.getCampaignContent()); preparedStatement.setInt(4, CampaignStatus.NASCENT.getValue()); preparedStatement.setInt(5, campaignVO.getGroupId()); preparedStatement.setInt(6, campaignVO.getUserId()); no_of_affected_rows = preparedStatement.executeUpdate(); return no_of_affected_rows; }catch(SQLException NamingException e){ logger.fatal(STR+e.getMessage(), e); return Constants.ERROR_RESPONSE; }finally{ try{ closeGracefully(); }catch(SQLException e){ logger.fatal(STR+e.getMessage(), e); return Constants.ERROR_RESPONSE; } } } | /***
* Inserts a Campaign in DB
* @param campaignVO
* returns number of affected rows
*/ | Inserts a Campaign in DB | addNewCampaign | {
"repo_name": "nbsiva90/mailmaster",
"path": "src/com/bliss/mailmaster/dao/impl/CampaignDAOImpl.java",
"license": "apache-2.0",
"size": 6282
} | [
"com.bliss.mailmaster.utils.Constants",
"com.bliss.mailmaster.utils.DatabaseManager",
"java.sql.SQLException",
"javax.naming.NamingException"
] | import com.bliss.mailmaster.utils.Constants; import com.bliss.mailmaster.utils.DatabaseManager; import java.sql.SQLException; import javax.naming.NamingException; | import com.bliss.mailmaster.utils.*; import java.sql.*; import javax.naming.*; | [
"com.bliss.mailmaster",
"java.sql",
"javax.naming"
] | com.bliss.mailmaster; java.sql; javax.naming; | 93,016 |
Font getFont(StyleContext sc, AttributeSet a, int defaultSize, StyleSheet ss) {
ss = getStyleSheet(ss);
int size = getFontSize(a, defaultSize, ss);
StringValue vAlignV = (StringValue)a.getAttribute
(CSS.Attribute.VERTICAL_ALIGN);
if ((vAlignV != null)) {
String vAlign = vAlignV.toString();
if ((vAlign.indexOf("sup") >= 0) ||
(vAlign.indexOf("sub") >= 0)) {
size -= 2;
}
}
FontFamily familyValue = (FontFamily)a.getAttribute
(CSS.Attribute.FONT_FAMILY);
String family = (familyValue != null) ? familyValue.getValue() :
Font.SANS_SERIF;
int style = Font.PLAIN;
FontWeight weightValue = (FontWeight) a.getAttribute
(CSS.Attribute.FONT_WEIGHT);
if ((weightValue != null) && (weightValue.getValue() > 400)) {
style |= Font.BOLD;
}
Object fs = a.getAttribute(CSS.Attribute.FONT_STYLE);
if ((fs != null) && (fs.toString().indexOf("italic") >= 0)) {
style |= Font.ITALIC;
}
if (family.equalsIgnoreCase("monospace")) {
family = Font.MONOSPACED;
}
Font f = sc.getFont(family, style, size);
if (f == null
|| (f.getFamily().equals(Font.DIALOG)
&& ! family.equalsIgnoreCase(Font.DIALOG))) {
family = Font.SANS_SERIF;
f = sc.getFont(family, style, size);
}
return f;
} | Font getFont(StyleContext sc, AttributeSet a, int defaultSize, StyleSheet ss) { ss = getStyleSheet(ss); int size = getFontSize(a, defaultSize, ss); StringValue vAlignV = (StringValue)a.getAttribute (CSS.Attribute.VERTICAL_ALIGN); if ((vAlignV != null)) { String vAlign = vAlignV.toString(); if ((vAlign.indexOf("sup") >= 0) (vAlign.indexOf("sub") >= 0)) { size -= 2; } } FontFamily familyValue = (FontFamily)a.getAttribute (CSS.Attribute.FONT_FAMILY); String family = (familyValue != null) ? familyValue.getValue() : Font.SANS_SERIF; int style = Font.PLAIN; FontWeight weightValue = (FontWeight) a.getAttribute (CSS.Attribute.FONT_WEIGHT); if ((weightValue != null) && (weightValue.getValue() > 400)) { style = Font.BOLD; } Object fs = a.getAttribute(CSS.Attribute.FONT_STYLE); if ((fs != null) && (fs.toString().indexOf(STR) >= 0)) { style = Font.ITALIC; } if (family.equalsIgnoreCase(STR)) { family = Font.MONOSPACED; } Font f = sc.getFont(family, style, size); if (f == null (f.getFamily().equals(Font.DIALOG) && ! family.equalsIgnoreCase(Font.DIALOG))) { family = Font.SANS_SERIF; f = sc.getFont(family, style, size); } return f; } | /**
* Returns the font for the values in the passed in AttributeSet.
* It is assumed the keys will be CSS.Attribute keys.
* <code>sc</code> is the StyleContext that will be messaged to get
* the font once the size, name and style have been determined.
*/ | Returns the font for the values in the passed in AttributeSet. It is assumed the keys will be CSS.Attribute keys. <code>sc</code> is the StyleContext that will be messaged to get the font once the size, name and style have been determined | getFont | {
"repo_name": "md-5/jdk10",
"path": "src/java.desktop/share/classes/javax/swing/text/html/CSS.java",
"license": "gpl-2.0",
"size": 136569
} | [
"java.awt.Font",
"javax.swing.text.AttributeSet",
"javax.swing.text.StyleContext"
] | import java.awt.Font; import javax.swing.text.AttributeSet; import javax.swing.text.StyleContext; | import java.awt.*; import javax.swing.text.*; | [
"java.awt",
"javax.swing"
] | java.awt; javax.swing; | 233,009 |
    /**
     * Provides serialization support: restores this object from the given
     * stream, then reconstructs the transient paint and stroke fields via
     * SerialUtilities (Paint and Stroke are not themselves serializable).
     * The read order must mirror the corresponding writeObject().
     *
     * @param stream  the input stream.
     *
     * @throws IOException if there is an I/O error.
     * @throws ClassNotFoundException if there is a classpath problem.
     */
    private void readObject(ObjectInputStream stream)
            throws IOException, ClassNotFoundException {
        stream.defaultReadObject();
        this.fillPaint = SerialUtilities.readPaint(stream);
        this.outlinePaint = SerialUtilities.readPaint(stream);
        this.outlineStroke = SerialUtilities.readStroke(stream);
    }
| void function(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); this.fillPaint = SerialUtilities.readPaint(stream); this.outlinePaint = SerialUtilities.readPaint(stream); this.outlineStroke = SerialUtilities.readStroke(stream); } | /**
* Provides serialization support.
*
* @param stream the input stream.
*
* @throws IOException if there is an I/O error.
* @throws ClassNotFoundException if there is a classpath problem.
*/ | Provides serialization support | readObject | {
"repo_name": "sebkur/JFreeChart",
"path": "src/main/java/org/jfree/chart/plot/dial/DialCap.java",
"license": "lgpl-3.0",
"size": 10682
} | [
"java.io.IOException",
"java.io.ObjectInputStream",
"org.jfree.io.SerialUtilities"
] | import java.io.IOException; import java.io.ObjectInputStream; import org.jfree.io.SerialUtilities; | import java.io.*; import org.jfree.io.*; | [
"java.io",
"org.jfree.io"
] | java.io; org.jfree.io; | 1,859,493 |
    /**
     * Returns the number of PRIDE projects matching the given search criteria
     * and filters, by delegating to the projects-by-criteria count query.
     *
     * @param query the criteria for the search
     * @param filters the filters for the search
     * @return a project count for the given criteria and filters
     * @throws IOException thrown if the web service is down or the JSON
     * response was invalid
     */
    public static int getProjectCount(String query, PrideFilter... filters) throws IOException {
        return getCount(getQueryURL(PrideQuery.GET_PROJECTS_BY_CRITERIA.getQueryTemplate(true), query, filters));
    }
* Returns a project count for the given criteria and filters.
*
* @param query the criteria for the search
* @param filters the filters for the search
* @return a project count for the given criteria and filters
 * @throws IOException thrown if the web service is down or the JSON response
 * was invalid
*/ | Returns a project count for the given criteria and filters | getProjectCount | {
"repo_name": "compomics/compomics-utilities",
"path": "src/main/java/com/compomics/util/pride/PrideWebService.java",
"license": "apache-2.0",
"size": 20970
} | [
"com.compomics.util.pride.prideobjects.webservice.PrideQuery",
"com.compomics.util.pride.prideobjects.webservice.query.PrideFilter",
"java.io.IOException"
] | import com.compomics.util.pride.prideobjects.webservice.PrideQuery; import com.compomics.util.pride.prideobjects.webservice.query.PrideFilter; import java.io.IOException; | import com.compomics.util.pride.prideobjects.webservice.*; import com.compomics.util.pride.prideobjects.webservice.query.*; import java.io.*; | [
"com.compomics.util",
"java.io"
] | com.compomics.util; java.io; | 2,015,695 |
private void assertTimeout(final long timeout, final TimeUnit unit,
final Callable<Void> callable) {
final ExecutorService es = Executors.newSingleThreadExecutor();
final Future<Void> ret = es.submit(callable);
final long begin = System.currentTimeMillis();
try {
// await Future with timeout.
ret.get(timeout, unit);
final long elapsed = System.currentTimeMillis() - begin;
fail("Expected timeout: elapsed=" + elapsed + "ms, timeout="
+ timeout + " " + unit);
} catch (TimeoutException e) {
// that is expected
final long elapsed = System.currentTimeMillis() - begin;
if (log.isInfoEnabled())
log.info("timeout after " + elapsed + "ms");
return;
} catch (Exception e) {
final long elapsed = System.currentTimeMillis() - begin;
fail("Expected timeout: elapsed=" + elapsed + ", timeout="
+ timeout + " " + unit, e);
} finally {
log.warn("Cancelling task - should interrupt accept()");
ret.cancel(true);
es.shutdown();
}
} | void function(final long timeout, final TimeUnit unit, final Callable<Void> callable) { final ExecutorService es = Executors.newSingleThreadExecutor(); final Future<Void> ret = es.submit(callable); final long begin = System.currentTimeMillis(); try { ret.get(timeout, unit); final long elapsed = System.currentTimeMillis() - begin; fail(STR + elapsed + STR + timeout + " " + unit); } catch (TimeoutException e) { final long elapsed = System.currentTimeMillis() - begin; if (log.isInfoEnabled()) log.info(STR + elapsed + "ms"); return; } catch (Exception e) { final long elapsed = System.currentTimeMillis() - begin; fail(STR + elapsed + STR + timeout + " " + unit, e); } finally { log.warn(STR); ret.cancel(true); es.shutdown(); } } | /**
* Fail the test if the {@link Callable} completes before the specified
* timeout.
*
* @param timeout
* @param unit
* @param callable
*/ | Fail the test if the <code>Callable</code> completes before the specified timeout | assertTimeout | {
"repo_name": "blazegraph/database",
"path": "bigdata-core-test/bigdata/src/test/com/bigdata/ha/pipeline/TestSocketsDirect.java",
"license": "gpl-2.0",
"size": 29052
} | [
"java.util.concurrent.Callable",
"java.util.concurrent.ExecutorService",
"java.util.concurrent.Executors",
"java.util.concurrent.Future",
"java.util.concurrent.TimeUnit",
"java.util.concurrent.TimeoutException"
] | import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; | import java.util.concurrent.*; | [
"java.util"
] | java.util; | 171,603 |
@Override
public List<String> getCommandAliases() {
List<String> aliases = new ArrayList<>();
aliases.add(this.COMMAND);
return aliases;
} | List<String> function() { List<String> aliases = new ArrayList<>(); aliases.add(this.COMMAND); return aliases; } | /**
* creates the list of possible aliases for the command
*
* @return a list of aliases
*/ | creates the list of possible aliases for the command | getCommandAliases | {
"repo_name": "Lergin/Laborus",
"path": "src/main/java/de/lergin/laborus/command/ReloadCommand.java",
"license": "mit",
"size": 2647
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 873,154 |
	/**
	 * Returns the absolute location of the given anchor, i.e. its position
	 * relative to the diagram rather than to its container shape.
	 *
	 * @param anchor the anchor to query
	 * @return the anchor location in diagram-relative coordinates
	 */
	public static ILocation getAnchorLocation(Anchor anchor) {
		return Graphiti.getPeLayoutService().getLocationRelativeToDiagram(anchor);
	}
* Returns the absolute location of that anchor
*
* @param anchor
* @return
*/ | Returns the absolute location of that anchor | getAnchorLocation | {
"repo_name": "camunda/camunda-eclipse-plugin",
"path": "org.camunda.bpm.modeler/src/org/camunda/bpm/modeler/core/layout/util/LayoutUtil.java",
"license": "epl-1.0",
"size": 45536
} | [
"org.eclipse.graphiti.datatypes.ILocation",
"org.eclipse.graphiti.mm.pictograms.Anchor",
"org.eclipse.graphiti.services.Graphiti"
] | import org.eclipse.graphiti.datatypes.ILocation; import org.eclipse.graphiti.mm.pictograms.Anchor; import org.eclipse.graphiti.services.Graphiti; | import org.eclipse.graphiti.datatypes.*; import org.eclipse.graphiti.mm.pictograms.*; import org.eclipse.graphiti.services.*; | [
"org.eclipse.graphiti"
] | org.eclipse.graphiti; | 949,809 |
    /**
     * Adds a skip token for pagination and returns the request for chaining.
     *
     * @param skipToken - Token for pagination
     * @return the updated request
     */
    @Nonnull
    public IdentityCollectionRequest skipToken(@Nonnull final String skipToken) {
        addSkipTokenOption(skipToken);
        return this;
    }
* Add Skip token for pagination
* @param skipToken - Token for pagination
* @return the updated request
*/ | Add Skip token for pagination | skipToken | {
"repo_name": "microsoftgraph/msgraph-sdk-java",
"path": "src/main/java/com/microsoft/graph/externalconnectors/requests/IdentityCollectionRequest.java",
"license": "mit",
"size": 5713
} | [
"com.microsoft.graph.externalconnectors.requests.IdentityCollectionRequest",
"javax.annotation.Nonnull"
] | import com.microsoft.graph.externalconnectors.requests.IdentityCollectionRequest; import javax.annotation.Nonnull; | import com.microsoft.graph.externalconnectors.requests.*; import javax.annotation.*; | [
"com.microsoft.graph",
"javax.annotation"
] | com.microsoft.graph; javax.annotation; | 159,208 |
    /**
     * Examines the status of the backend engine server by issuing an HTTP(S)
     * request against the given health-servlet URL.
     *
     * @param serverUrl
     *            the engine server url of Health Servlet
     * @param errors
     *            collection which aggregates any error messages encountered
     * @return true if the engine server is responsive (responds with code
     *         200 - HTTP_OK), else false
     */
    private boolean checkServerStatus(String serverUrl, Set<String> errors) {
        boolean isResponsive = true;
        HttpURLConnection engineConn = null;
        URL engine;
        try {
            engine = new URL(serverUrl);
            if (isHttpsProtocol) {
                // HTTPS: install the configured SSL socket factory and,
                // when enabled, skip hostname verification (presumably for
                // self-signed certificates — TODO confirm).
                engineConn = (HttpsURLConnection) engine.openConnection();
                ((HttpsURLConnection) engineConn).setSSLSocketFactory(sslFactory);
                if (sslIgnoreHostVerification) {
                    ((HttpsURLConnection) engineConn).setHostnameVerifier(IgnoredHostnameVerifier);
                }
            } else {
                engineConn = (HttpURLConnection) engine.openConnection();
            }
        } catch (IOException e) {
            // Malformed URL or connection setup failure.
            errors.add(e.getMessage());
            isResponsive = false;
        }
        if (isResponsive) {
            try {
                // getResponseCode() triggers the actual HTTP exchange.
                int responseCode = engineConn.getResponseCode();
                if (responseCode != HttpURLConnection.HTTP_OK) {
                    isResponsive = false;
                    log.debug(MessageFormat.format("Server is non responsive with response code: {0}", responseCode));
                }
            } catch (Exception e) {
                errors.add(e.getMessage());
                isResponsive = false;
            } finally {
                // Always release the connection once the response was read.
                if (engineConn != null) {
                    engineConn.disconnect();
                    engineConn = null;
                }
            }
        }
        log.debug("checkServerStatus return: " + isResponsive);
        return isResponsive;
    }
/**
* Adds an event to audit_log table, representing server status
* @param eventType
* {@code AuditLogType.VDC_START} or {@code AuditLogType.VDC_STOP} events
* @param eventId
* id associated with {@code eventType} parameter
* @param severity
* severity associated with eventType, values are taken from {@code AuditLogSeverity} | boolean function(String serverUrl, Set<String> errors) { boolean isResponsive = true; HttpURLConnection engineConn = null; URL engine; try { engine = new URL(serverUrl); if (isHttpsProtocol) { engineConn = (HttpsURLConnection) engine.openConnection(); ((HttpsURLConnection) engineConn).setSSLSocketFactory(sslFactory); if (sslIgnoreHostVerification) { ((HttpsURLConnection) engineConn).setHostnameVerifier(IgnoredHostnameVerifier); } } else { engineConn = (HttpURLConnection) engine.openConnection(); } } catch (IOException e) { errors.add(e.getMessage()); isResponsive = false; } if (isResponsive) { try { int responseCode = engineConn.getResponseCode(); if (responseCode != HttpURLConnection.HTTP_OK) { isResponsive = false; log.debug(MessageFormat.format(STR, responseCode)); } } catch (Exception e) { errors.add(e.getMessage()); isResponsive = false; } finally { if (engineConn != null) { engineConn.disconnect(); engineConn = null; } } } log.debug(STR + isResponsive); return isResponsive; } /** * Adds an event to audit_log table, representing server status * @param eventType * {@code AuditLogType.VDC_START} or {@code AuditLogType.VDC_STOP} events * @param eventId * id associated with {@code eventType} parameter * @param severity * severity associated with eventType, values are taken from {@code AuditLogSeverity} | /**
* Examines the status of the backend engine server
*
* @param serverUrl
* the engine server url of Health Servlet
* @param errors
* collection which aggregates any error
* @return true is engine server is responsive (response with code 200 - HTTP_OK), else false
*/ | Examines the status of the backend engine server | checkServerStatus | {
"repo_name": "derekhiggins/ovirt-engine",
"path": "backend/manager/tools/engine-notifier/engine-notifier-service/src/main/java/org/ovirt/engine/core/notifier/EngineMonitorService.java",
"license": "apache-2.0",
"size": 23559
} | [
"java.io.IOException",
"java.net.HttpURLConnection",
"java.text.MessageFormat",
"java.util.Set",
"javax.net.ssl.HttpsURLConnection",
"org.ovirt.engine.core.common.AuditLogSeverity",
"org.ovirt.engine.core.common.AuditLogType"
] | import java.io.IOException; import java.net.HttpURLConnection; import java.text.MessageFormat; import java.util.Set; import javax.net.ssl.HttpsURLConnection; import org.ovirt.engine.core.common.AuditLogSeverity; import org.ovirt.engine.core.common.AuditLogType; | import java.io.*; import java.net.*; import java.text.*; import java.util.*; import javax.net.ssl.*; import org.ovirt.engine.core.common.*; | [
"java.io",
"java.net",
"java.text",
"java.util",
"javax.net",
"org.ovirt.engine"
] | java.io; java.net; java.text; java.util; javax.net; org.ovirt.engine; | 2,508,964 |
  /**
   * Analyzes 'stmt' against the default database, expecting it to pass.
   * Asserts in case of analysis error. Delegates to the three-argument
   * overload with a fresh analyzer and no expected error.
   */
  public ParseNode AnalyzesOk(String stmt) {
    return AnalyzesOk(stmt, createAnalyzer(Catalog.DEFAULT_DB), null);
  }
* Analyze 'stmt', expecting it to pass. Asserts in case of analysis error.
*/ | Analyze 'stmt', expecting it to pass. Asserts in case of analysis error | AnalyzesOk | {
"repo_name": "dayutianfei/impala-Q",
"path": "fe/src/test/java/com/cloudera/impala/analysis/AnalyzerTest.java",
"license": "apache-2.0",
"size": 30096
} | [
"com.cloudera.impala.catalog.Catalog"
] | import com.cloudera.impala.catalog.Catalog; | import com.cloudera.impala.catalog.*; | [
"com.cloudera.impala"
] | com.cloudera.impala; | 295,255 |
    /**
     * Computes an RFC 2104-compliant HMAC signature for an array of bytes and
     * returns the result as a Base64 encoded string.
     *
     * @param data Data needed to sign.
     * @param key The key to sign data; converted to bytes using
     *     DEFAULT_ENCODING before signing.
     * @param algorithm The algorithm to sign data.
     * @return String Signed string.
     * @throws ClientException if the signature cannot be computed; the
     *     underlying exception is preserved as the cause.
     */
    protected String signAndBase64Encode(byte[] data, String key, SigningAlgorithm algorithm)
            throws ClientException {
        try {
            byte[] signature = sign(data, key.getBytes(DEFAULT_ENCODING), algorithm);
            // NOTE(review): new String(byte[]) uses the platform default
            // charset; Base64 output is pure ASCII so this is effectively
            // safe, but an explicit charset would be clearer.
            return new String(Base64.encodeBase64(signature));
        } catch (Exception e) {
            throw new ClientException("Unable to calculate a request signature: " + e.getMessage(),
                    e);
        }
    }
* Computes an RFC 2104-compliant HMAC signature for an array of bytes and returns the result as
* a Base64 encoded string.
*
* @param data Data needed to sign.
* @param key The key to sign data.
* @param algorithm The algorithm to sign data.
* @return String Signed string.
* @throws ClientException ClientException.
*/ | Computes an RFC 2104-compliant HMAC signature for an array of bytes and returns the result as a Base64 encoded string | signAndBase64Encode | {
"repo_name": "NetEase-Cloudsearch/streamproxy-sdk-java",
"path": "streamproxy-sdk-java/src/main/java/com/netease/cloud/auth/AbstractSigner.java",
"license": "apache-2.0",
"size": 5566
} | [
"com.netease.cloud.exception.ClientException",
"org.apache.commons.codec.binary.Base64"
] | import com.netease.cloud.exception.ClientException; import org.apache.commons.codec.binary.Base64; | import com.netease.cloud.exception.*; import org.apache.commons.codec.binary.*; | [
"com.netease.cloud",
"org.apache.commons"
] | com.netease.cloud; org.apache.commons; | 611,132 |
    /**
     * Gets options for a resource using an HTTP OPTIONS request.
     *
     * @param requestOptions The options to use for the request.
     * @param requestHeaders The headers to include in the request, or
     *            {@code null} to send none.
     * @param successPredicate A predicate on the returned HTTP status code
     *            for determining success.
     * @return A future that will succeed if the predicate evaluates to
     *         {@code true}. In that case the future will contain the
     *         response headers; otherwise it fails with a
     *         ServiceInvocationException carrying the status code.
     * @throws NullPointerException if options or predicate are {@code null}.
     */
    public Future<MultiMap> options(
            final RequestOptions requestOptions,
            final MultiMap requestHeaders,
            final Predicate<Integer> successPredicate) {

        Objects.requireNonNull(requestOptions);
        Objects.requireNonNull(successPredicate);

        final Future<MultiMap> result = Future.future();
        // Run on the stored context so the HTTP request is issued on the
        // vert.x event loop that owns the client.
        context.runOnContext(go -> {
            final HttpClientRequest req = client.options(requestOptions)
                    .handler(response -> {
                        if (successPredicate.test(response.statusCode())) {
                            result.complete(response.headers());
                        } else {
                            result.fail(new ServiceInvocationException(response.statusCode()));
                        }
                    }).exceptionHandler(result::fail);
            if (requestHeaders != null) {
                req.headers().addAll(requestHeaders);
            }
            req.end();
        });
        return result;
    }
* Gets options for a resource using an HTTP OPTIONS request.
*
* @param requestOptions The options to use for the request.
* @param requestHeaders The headers to include in the request.
* @param successPredicate A predicate on the returned HTTP status code for determining success.
* @return A future that will succeed if the predicate evaluates to {@code true}. In that case the
* future will contain the response headers.
* @throws NullPointerException if options or predicate are {@code null}.
*/ | Gets options for a resource using an HTTP OPTIONS request | options | {
"repo_name": "dejanb/hono",
"path": "tests/src/test/java/org/eclipse/hono/tests/CrudHttpClient.java",
"license": "epl-1.0",
"size": 19380
} | [
"io.vertx.core.Future",
"io.vertx.core.MultiMap",
"io.vertx.core.http.HttpClientRequest",
"io.vertx.core.http.RequestOptions",
"java.util.Objects",
"java.util.function.Predicate",
"org.eclipse.hono.client.ServiceInvocationException"
] | import io.vertx.core.Future; import io.vertx.core.MultiMap; import io.vertx.core.http.HttpClientRequest; import io.vertx.core.http.RequestOptions; import java.util.Objects; import java.util.function.Predicate; import org.eclipse.hono.client.ServiceInvocationException; | import io.vertx.core.*; import io.vertx.core.http.*; import java.util.*; import java.util.function.*; import org.eclipse.hono.client.*; | [
"io.vertx.core",
"java.util",
"org.eclipse.hono"
] | io.vertx.core; java.util; org.eclipse.hono; | 297,003 |
  /**
   * Given a constructor type and a property name, check that the property has
   * the JSDoc annotation @override iff the property is declared on a
   * superclass. Several checks regarding inheritance correctness are also
   * performed.
   *
   * @param t the current traversal (used for error construction)
   * @param n the node the property is defined on (error reporting target)
   * @param ctorType the constructor or interface declaring the property
   * @param propertyName the name of the property being checked
   * @param info the JSDoc attached to the property definition, may be null
   * @param propertyType the declared type of the property
   */
  private void checkDeclaredPropertyInheritance(
      NodeTraversal t, Node n, FunctionType ctorType, String propertyName,
      JSDocInfo info, JSType propertyType) {
    // If the supertype doesn't resolve correctly, we've warned about this
    // already.
    if (hasUnknownOrEmptySupertype(ctorType)) {
      return;
    }

    FunctionType superClass = ctorType.getSuperClassConstructor();
    boolean superClassHasProperty =
        superClass != null && superClass.getInstanceType().hasProperty(propertyName);
    boolean superClassHasDeclaredProperty =
        superClass != null && superClass.getInstanceType().isPropertyTypeDeclared(propertyName);

    // For interface
    boolean superInterfaceHasProperty = false;
    boolean superInterfaceHasDeclaredProperty = false;
    if (ctorType.isInterface()) {
      for (ObjectType interfaceType : ctorType.getExtendedInterfaces()) {
        superInterfaceHasProperty =
            superInterfaceHasProperty || interfaceType.hasProperty(propertyName);
        superInterfaceHasDeclaredProperty =
            superInterfaceHasDeclaredProperty || interfaceType.isPropertyTypeDeclared(propertyName);
      }
    }
    boolean declaredOverride = info != null && info.isOverride();

    // Scan implemented interfaces for a property of the same name; a match
    // without @override is reported below.
    boolean foundInterfaceProperty = false;
    if (ctorType.isConstructor()) {
      for (JSType implementedInterface :
          ctorType.getAllImplementedInterfaces()) {
        if (implementedInterface.isUnknownType() || implementedInterface.isEmptyType()) {
          continue;
        }
        FunctionType interfaceType =
            implementedInterface.toObjectType().getConstructor();
        checkNotNull(interfaceType);

        boolean interfaceHasProperty =
            interfaceType.getPrototype().hasProperty(propertyName);
        foundInterfaceProperty = foundInterfaceProperty || interfaceHasProperty;
        if (!declaredOverride
            && interfaceHasProperty
            && !"__proto__".equals(propertyName)
            && !"constructor".equals(propertyName)) {
          // @override not present, but the property does override an interface property
          compiler.report(
              t.makeError(
                  n,
                  HIDDEN_INTERFACE_PROPERTY,
                  propertyName,
                  interfaceType.getTopMostDefiningType(propertyName).toString()));
        }
      }
    }

    if (!declaredOverride
        && !superClassHasProperty
        && !superInterfaceHasProperty) {
      // nothing to do here, it's just a plain new property
      return;
    }

    ObjectType topInstanceType = superClassHasDeclaredProperty
        ? superClass.getTopMostDefiningType(propertyName) : null;
    boolean declaredLocally =
        ctorType.isConstructor()
            && (ctorType.getPrototype().hasOwnProperty(propertyName)
                || ctorType.getInstanceType().hasOwnProperty(propertyName));
    if (!declaredOverride
        && superClassHasDeclaredProperty
        && declaredLocally
        && !"__proto__".equals(propertyName)
        // constructor always "overrides" the superclass "constructor" there is no
        // value in marking it with "@override".
        && !"constructor".equals(propertyName)) {
      // @override not present, but the property does override a superclass
      // property
      compiler.report(
          t.makeError(n, HIDDEN_SUPERCLASS_PROPERTY, propertyName, topInstanceType.toString()));
    }

    // @override is present and we have to check that it is ok
    if (superClassHasDeclaredProperty) {
      // there is a superclass implementation
      JSType superClassPropType =
          superClass.getInstanceType().getPropertyType(propertyName);
      // Substitute template types from the subclass's "this" type so the
      // subtype check compares concrete instantiations.
      TemplateTypeMap ctorTypeMap =
          ctorType.getTypeOfThis().getTemplateTypeMap();
      if (!ctorTypeMap.isEmpty()) {
        superClassPropType = superClassPropType.visit(
            new TemplateTypeMapReplacer(typeRegistry, ctorTypeMap));
      }

      if (!propertyType.isSubtype(superClassPropType, this.subtypingMode)) {
        compiler.report(
            t.makeError(n, HIDDEN_SUPERCLASS_PROPERTY_MISMATCH,
                propertyName, topInstanceType.toString(),
                superClassPropType.toString(), propertyType.toString()));
      }
    } else if (superInterfaceHasDeclaredProperty) {
      // there is an super interface property
      for (ObjectType interfaceType : ctorType.getExtendedInterfaces()) {
        if (interfaceType.hasProperty(propertyName)) {
          JSType superPropertyType =
              interfaceType.getPropertyType(propertyName);
          if (!propertyType.isSubtype(superPropertyType, this.subtypingMode)) {
            topInstanceType = interfaceType.getConstructor().
                getTopMostDefiningType(propertyName);
            compiler.report(
                t.makeError(n, HIDDEN_SUPERCLASS_PROPERTY_MISMATCH,
                    propertyName, topInstanceType.toString(),
                    superPropertyType.toString(),
                    propertyType.toString()));
          }
        }
      }
    } else if (!foundInterfaceProperty
        && !superClassHasProperty
        && !superInterfaceHasProperty) {
      // there is no superclass nor interface implementation
      compiler.report(
          t.makeError(n, UNKNOWN_OVERRIDE,
              propertyName, ctorType.getInstanceType().toString()));
    }
  }
superClass.getTopMostDefiningType(propertyName) : null; boolean declaredLocally = ctorType.isConstructor() && (ctorType.getPrototype().hasOwnProperty(propertyName) ctorType.getInstanceType().hasOwnProperty(propertyName)); if (!declaredOverride && superClassHasDeclaredProperty && declaredLocally && !STR.equals(propertyName) && !STR.equals(propertyName)) { compiler.report( t.makeError(n, HIDDEN_SUPERCLASS_PROPERTY, propertyName, topInstanceType.toString())); } if (superClassHasDeclaredProperty) { JSType superClassPropType = superClass.getInstanceType().getPropertyType(propertyName); TemplateTypeMap ctorTypeMap = ctorType.getTypeOfThis().getTemplateTypeMap(); if (!ctorTypeMap.isEmpty()) { superClassPropType = superClassPropType.visit( new TemplateTypeMapReplacer(typeRegistry, ctorTypeMap)); } if (!propertyType.isSubtype(superClassPropType, this.subtypingMode)) { compiler.report( t.makeError(n, HIDDEN_SUPERCLASS_PROPERTY_MISMATCH, propertyName, topInstanceType.toString(), superClassPropType.toString(), propertyType.toString())); } } else if (superInterfaceHasDeclaredProperty) { for (ObjectType interfaceType : ctorType.getExtendedInterfaces()) { if (interfaceType.hasProperty(propertyName)) { JSType superPropertyType = interfaceType.getPropertyType(propertyName); if (!propertyType.isSubtype(superPropertyType, this.subtypingMode)) { topInstanceType = interfaceType.getConstructor(). getTopMostDefiningType(propertyName); compiler.report( t.makeError(n, HIDDEN_SUPERCLASS_PROPERTY_MISMATCH, propertyName, topInstanceType.toString(), superPropertyType.toString(), propertyType.toString())); } } } } else if (!foundInterfaceProperty && !superClassHasProperty && !superInterfaceHasProperty) { compiler.report( t.makeError(n, UNKNOWN_OVERRIDE, propertyName, ctorType.getInstanceType().toString())); } } | /**
* Given a constructor type and a property name, check that the property has
* the JSDoc annotation @override iff the property is declared on a
* superclass. Several checks regarding inheritance correctness are also
* performed.
*/ | Given a constructor type and a property name, check that the property has the JSDoc annotation @override iff the property is declared on a superclass. Several checks regarding inheritance correctness are also performed | checkDeclaredPropertyInheritance | {
"repo_name": "tiobe/closure-compiler",
"path": "src/com/google/javascript/jscomp/TypeCheck.java",
"license": "apache-2.0",
"size": 113785
} | [
"com.google.common.base.Preconditions",
"com.google.javascript.rhino.JSDocInfo",
"com.google.javascript.rhino.Node",
"com.google.javascript.rhino.jstype.FunctionType",
"com.google.javascript.rhino.jstype.JSType",
"com.google.javascript.rhino.jstype.ObjectType",
"com.google.javascript.rhino.jstype.TemplateTypeMap",
"com.google.javascript.rhino.jstype.TemplateTypeMapReplacer"
] | import com.google.common.base.Preconditions; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.jstype.FunctionType; import com.google.javascript.rhino.jstype.JSType; import com.google.javascript.rhino.jstype.ObjectType; import com.google.javascript.rhino.jstype.TemplateTypeMap; import com.google.javascript.rhino.jstype.TemplateTypeMapReplacer; | import com.google.common.base.*; import com.google.javascript.rhino.*; import com.google.javascript.rhino.jstype.*; | [
"com.google.common",
"com.google.javascript"
] | com.google.common; com.google.javascript; | 2,115,415 |
void onDrawnToSurface(Surface surface);
}
// TODO: Use MediaFormat constants if these get exposed through the API. See
// [Internal: b/14127601].
private static final String KEY_CROP_LEFT = "crop-left";
private static final String KEY_CROP_RIGHT = "crop-right";
private static final String KEY_CROP_BOTTOM = "crop-bottom";
private static final String KEY_CROP_TOP = "crop-top";
public static final int MSG_SET_SURFACE = 1;
private final VideoFrameReleaseTimeHelper frameReleaseTimeHelper;
private final EventListener eventListener;
private final long allowedJoiningTimeUs;
private final int videoScalingMode;
private final int maxDroppedFrameCountToNotify;
private Surface surface;
private boolean reportedDrawnToSurface;
private boolean renderedFirstFrame;
private long joiningDeadlineUs;
private long droppedFrameAccumulationStartTimeMs;
private int droppedFrameCount;
private int consecutiveDroppedFrameCount;
private int pendingRotationDegrees;
private float pendingPixelWidthHeightRatio;
private int currentWidth;
private int currentHeight;
private int currentUnappliedRotationDegrees;
private float currentPixelWidthHeightRatio;
private int lastReportedWidth;
private int lastReportedHeight;
private int lastReportedUnappliedRotationDegrees;
private float lastReportedPixelWidthHeightRatio;
  /**
   * Creates a renderer with no allowed joining time, no event notification
   * and no DRM support.
   *
   * @param context A context.
   * @param source The upstream source from which the renderer obtains samples.
   * @param mediaCodecSelector A decoder selector.
   * @param videoScalingMode The scaling mode to apply to the video output.
   */
  public MediaCodecVideoTrackRenderer(Context context, SampleSource source,
      MediaCodecSelector mediaCodecSelector, int videoScalingMode) {
    this(context, source, mediaCodecSelector, videoScalingMode, 0);
  }
  /**
   * Creates a renderer with an allowed joining time but no event
   * notification and no DRM support.
   *
   * @param allowedJoiningTimeMs The maximum duration in milliseconds for
   *     which this renderer can attempt to seamlessly join an ongoing playback.
   */
  public MediaCodecVideoTrackRenderer(Context context, SampleSource source,
      MediaCodecSelector mediaCodecSelector, int videoScalingMode, long allowedJoiningTimeMs) {
    this(context, source, mediaCodecSelector, videoScalingMode, allowedJoiningTimeMs, null, null,
        -1);
  }
  /**
   * Creates a renderer that notifies the given listener but has no DRM
   * support.
   *
   * @param eventHandler A handler to use when delivering events to
   *     {@code eventListener}, may be null.
   * @param eventListener A listener of events, may be null.
   * @param maxDroppedFrameCountToNotify The number of dropped frames after
   *     which the listener is notified.
   */
  public MediaCodecVideoTrackRenderer(Context context, SampleSource source,
      MediaCodecSelector mediaCodecSelector, int videoScalingMode, long allowedJoiningTimeMs,
      Handler eventHandler, EventListener eventListener, int maxDroppedFrameCountToNotify) {
    this(context, source, mediaCodecSelector, videoScalingMode, allowedJoiningTimeMs, null, false,
        eventHandler, eventListener, maxDroppedFrameCountToNotify);
  }
  /**
   * Fully-parameterized constructor; all other constructors delegate here.
   *
   * @param drmSessionManager A manager for DRM sessions, may be null.
   * @param playClearSamplesWithoutKeys Whether clear samples may be played
   *     before DRM keys are available.
   */
  public MediaCodecVideoTrackRenderer(Context context, SampleSource source,
      MediaCodecSelector mediaCodecSelector, int videoScalingMode, long allowedJoiningTimeMs,
      DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
      boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener,
      int maxDroppedFrameCountToNotify) {
    super(source, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys, eventHandler,
        eventListener);
    this.frameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(context);
    this.videoScalingMode = videoScalingMode;
    // Joining time is stored internally in microseconds.
    this.allowedJoiningTimeUs = allowedJoiningTimeMs * 1000;
    this.eventListener = eventListener;
    this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
    // -1 sentinels mean "not yet known / not applicable".
    joiningDeadlineUs = -1;
    currentWidth = -1;
    currentHeight = -1;
    currentPixelWidthHeightRatio = -1;
    pendingPixelWidthHeightRatio = -1;
    lastReportedWidth = -1;
    lastReportedHeight = -1;
    lastReportedPixelWidthHeightRatio = -1;
  }
eventListener, maxDroppedFrameCountToNotify); } public MediaCodecVideoTrackRenderer(Context context, SampleSource source, MediaCodecSelector mediaCodecSelector, int videoScalingMode, long allowedJoiningTimeMs, DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener, int maxDroppedFrameCountToNotify) { super(source, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener); this.frameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(context); this.videoScalingMode = videoScalingMode; this.allowedJoiningTimeUs = allowedJoiningTimeMs * 1000; this.eventListener = eventListener; this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify; joiningDeadlineUs = -1; currentWidth = -1; currentHeight = -1; currentPixelWidthHeightRatio = -1; pendingPixelWidthHeightRatio = -1; lastReportedWidth = -1; lastReportedHeight = -1; lastReportedPixelWidthHeightRatio = -1; } | /**
* Invoked when a frame is rendered to a surface for the first time following that surface
* having been set as the target for the renderer.
*
* @param surface The surface to which a first frame has been rendered.
*/ | Invoked when a frame is rendered to a surface for the first time following that surface having been set as the target for the renderer | onDrawnToSurface | {
"repo_name": "sadegh-arfa/igramApp",
"path": "TMessagesProj/src/main/java/org/telegram/messenger/exoplayer/MediaCodecVideoTrackRenderer.java",
"license": "gpl-2.0",
"size": 25541
} | [
"android.content.Context",
"android.os.Handler",
"android.view.Surface",
"org.telegram.messenger.exoplayer.drm.DrmSessionManager",
"org.telegram.messenger.exoplayer.drm.FrameworkMediaCrypto"
] | import android.content.Context; import android.os.Handler; import android.view.Surface; import org.telegram.messenger.exoplayer.drm.DrmSessionManager; import org.telegram.messenger.exoplayer.drm.FrameworkMediaCrypto; | import android.content.*; import android.os.*; import android.view.*; import org.telegram.messenger.exoplayer.drm.*; | [
"android.content",
"android.os",
"android.view",
"org.telegram.messenger"
] | android.content; android.os; android.view; org.telegram.messenger; | 1,952,939 |
public void disableMiniMap() {
miniMap.hideMiniMap();
}
////////////////
// WIDGETS
////////////////
private Point p = new Point(0, 0); // this is for speed - faster not to re-create Points
| void function() { miniMap.hideMiniMap(); } private Point p = new Point(0, 0); | /**
* Disables the MiniMap from canvas
*
*/ | Disables the MiniMap from canvas | disableMiniMap | {
"repo_name": "laurentschall/openblocks",
"path": "src/main/java/edu/mit/blocks/workspace/Workspace.java",
"license": "lgpl-3.0",
"size": 36613
} | [
"java.awt.Point"
] | import java.awt.Point; | import java.awt.*; | [
"java.awt"
] | java.awt; | 903,217 |
protected Appearance createAppearance() {
Appearance app = new Appearance();
ColoringAttributes ca = new ColoringAttributes(new Color3f(
java.awt.Color.GREEN), ColoringAttributes.SHADE_FLAT);
app.setColoringAttributes(ca);
return app;
}
| Appearance function() { Appearance app = new Appearance(); ColoringAttributes ca = new ColoringAttributes(new Color3f( java.awt.Color.GREEN), ColoringAttributes.SHADE_FLAT); app.setColoringAttributes(ca); return app; } | /**
* Convenience method for creating Appearance object.
*
* @return appearance object with color.
*/ | Convenience method for creating Appearance object | createAppearance | {
"repo_name": "orlanthi/healpix",
"path": "src/java/src/healpix/plot3d/gui/healpix3d/HealSphere.java",
"license": "gpl-2.0",
"size": 4616
} | [
"javax.media.j3d.Appearance",
"javax.media.j3d.ColoringAttributes",
"javax.vecmath.Color3f"
] | import javax.media.j3d.Appearance; import javax.media.j3d.ColoringAttributes; import javax.vecmath.Color3f; | import javax.media.j3d.*; import javax.vecmath.*; | [
"javax.media",
"javax.vecmath"
] | javax.media; javax.vecmath; | 1,500,042 |
@Override
public Boolean incluirProcessoBloco(UnidadeSei unidade, String idBloco, String protocoloProcedimento) {
String retorno = seiPortType.incluirProcessoBloco(siglaSistema, identificacaoServico, unidade.getId(), idBloco, protocoloProcedimento, null);
return converterRetornoBooleano(retorno);
} | Boolean function(UnidadeSei unidade, String idBloco, String protocoloProcedimento) { String retorno = seiPortType.incluirProcessoBloco(siglaSistema, identificacaoServico, unidade.getId(), idBloco, protocoloProcedimento, null); return converterRetornoBooleano(retorno); } | /**
* Incluir processo bloco.
*
* @param idBloco o(a) id bloco.
* @param protocoloProcedimento o(a) protocolo procedimento.
* @return o valor de boolean
*/ | Incluir processo bloco | incluirProcessoBloco | {
"repo_name": "opensingular/singular-server",
"path": "requirement/requirement-sei-30-connector/src/main/java/org/opensingular/requirement/connector/sei30/SEIWS.java",
"license": "apache-2.0",
"size": 29392
} | [
"org.opensingular.requirement.connector.sei30.model.UnidadeSei"
] | import org.opensingular.requirement.connector.sei30.model.UnidadeSei; | import org.opensingular.requirement.connector.sei30.model.*; | [
"org.opensingular.requirement"
] | org.opensingular.requirement; | 2,722,976 |
String readLine() throws IOException; | String readLine() throws IOException; | /**
* Read a single log line, ala BufferedReader.readLine().
*
* @return
* @throws IOException
*/ | Read a single log line, ala BufferedReader.readLine() | readLine | {
"repo_name": "plusCubed/matlog",
"path": "app/src/main/java/com/pluscubed/logcat/reader/LogcatReader.java",
"license": "gpl-3.0",
"size": 481
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,656,340 |
@Override
public void looseUnmarshal(OpenWireFormat wireFormat, Object o, DataInput dataIn) throws IOException {
super.looseUnmarshal(wireFormat, o, dataIn);
ProducerAck info = (ProducerAck) o;
info.setProducerId((ProducerId) looseUnmarsalNestedObject(wireFormat, dataIn));
info.setSize(dataIn.readInt());
} | void function(OpenWireFormat wireFormat, Object o, DataInput dataIn) throws IOException { super.looseUnmarshal(wireFormat, o, dataIn); ProducerAck info = (ProducerAck) o; info.setProducerId((ProducerId) looseUnmarsalNestedObject(wireFormat, dataIn)); info.setSize(dataIn.readInt()); } | /**
* Un-marshal an object instance from the data input stream
*
* @param o
* the object to un-marshal
* @param dataIn
* the data input stream to build the object from
* @throws IOException
*/ | Un-marshal an object instance from the data input stream | looseUnmarshal | {
"repo_name": "apache/activemq-openwire",
"path": "openwire-legacy/src/main/java/org/apache/activemq/openwire/codec/v10/ProducerAckMarshaller.java",
"license": "apache-2.0",
"size": 4395
} | [
"java.io.DataInput",
"java.io.IOException",
"org.apache.activemq.openwire.codec.OpenWireFormat",
"org.apache.activemq.openwire.commands.ProducerAck",
"org.apache.activemq.openwire.commands.ProducerId"
] | import java.io.DataInput; import java.io.IOException; import org.apache.activemq.openwire.codec.OpenWireFormat; import org.apache.activemq.openwire.commands.ProducerAck; import org.apache.activemq.openwire.commands.ProducerId; | import java.io.*; import org.apache.activemq.openwire.codec.*; import org.apache.activemq.openwire.commands.*; | [
"java.io",
"org.apache.activemq"
] | java.io; org.apache.activemq; | 1,816,376 |
static void checkpoint(final DeepLearningParameters oldP, final DeepLearningParameters newP) {
checkCompleteness();
if (newP._nfolds != 0)
throw new UnsupportedOperationException("nfolds must be 0: Cross-validation is not supported during checkpoint restarts.");
if ((newP._valid == null) != (oldP._valid == null)
|| (newP._valid != null && !newP._valid.equals(oldP._valid))) {
throw new IllegalArgumentException("Validation dataset must be the same as for the checkpointed model.");
}
if (!newP._autoencoder && (newP._response_column == null || !newP._response_column.equals(oldP._response_column))) {
throw new IllegalArgumentException("Response column (" + newP._response_column + ") is not the same as for the checkpointed model: " + oldP._response_column);
}
if (!Arrays.equals(newP._hidden, oldP._hidden)) {
throw new IllegalArgumentException("Hidden layers (" + Arrays.toString(newP._hidden) + ") is not the same as for the checkpointed model: " + Arrays.toString(oldP._hidden));
}
if (!Arrays.equals(newP._ignored_columns, oldP._ignored_columns)) {
throw new IllegalArgumentException("Ignored columns must be the same as for the checkpointed model.");
}
//compare the user-given parameters before and after and check that they are not changed
for (Field fBefore : oldP.getClass().getDeclaredFields()) {
if (ArrayUtils.contains(cp_not_modifiable, fBefore.getName())) {
for (Field fAfter : newP.getClass().getDeclaredFields()) {
if (fBefore.equals(fAfter)) {
try {
if (fAfter.get(newP) == null || fBefore.get(oldP) == null || !fBefore.get(oldP).toString().equals(fAfter.get(newP).toString())) { // if either of the two parameters is null, skip the toString()
if (fBefore.get(oldP) == null && fAfter.get(newP) == null)
continue; //if both parameters are null, we don't need to do anything
throw new IllegalArgumentException("Cannot change parameter: '" + fBefore.getName() + "': " + fBefore.get(oldP) + " -> " + fAfter.get(newP));
}
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
}
}
} | static void checkpoint(final DeepLearningParameters oldP, final DeepLearningParameters newP) { checkCompleteness(); if (newP._nfolds != 0) throw new UnsupportedOperationException(STR); if ((newP._valid == null) != (oldP._valid == null) (newP._valid != null && !newP._valid.equals(oldP._valid))) { throw new IllegalArgumentException(STR); } if (!newP._autoencoder && (newP._response_column == null !newP._response_column.equals(oldP._response_column))) { throw new IllegalArgumentException(STR + newP._response_column + STR + oldP._response_column); } if (!Arrays.equals(newP._hidden, oldP._hidden)) { throw new IllegalArgumentException(STR + Arrays.toString(newP._hidden) + STR + Arrays.toString(oldP._hidden)); } if (!Arrays.equals(newP._ignored_columns, oldP._ignored_columns)) { throw new IllegalArgumentException(STR); } for (Field fBefore : oldP.getClass().getDeclaredFields()) { if (ArrayUtils.contains(cp_not_modifiable, fBefore.getName())) { for (Field fAfter : newP.getClass().getDeclaredFields()) { if (fBefore.equals(fAfter)) { try { if (fAfter.get(newP) == null fBefore.get(oldP) == null !fBefore.get(oldP).toString().equals(fAfter.get(newP).toString())) { if (fBefore.get(oldP) == null && fAfter.get(newP) == null) continue; throw new IllegalArgumentException(STR + fBefore.getName() + STR + fBefore.get(oldP) + STR + fAfter.get(newP)); } } catch (IllegalAccessException e) { e.printStackTrace(); } } } } } } | /**
* Check that checkpoint continuation is possible
*
* @param oldP old DL parameters (from checkpoint)
* @param newP new DL parameters (user-given, to restart from checkpoint)
*/ | Check that checkpoint continuation is possible | checkpoint | {
"repo_name": "printedheart/h2o-3",
"path": "h2o-algos/src/main/java/hex/deeplearning/DeepLearningParameters.java",
"license": "apache-2.0",
"size": 40599
} | [
"java.lang.reflect.Field",
"java.util.Arrays"
] | import java.lang.reflect.Field; import java.util.Arrays; | import java.lang.reflect.*; import java.util.*; | [
"java.lang",
"java.util"
] | java.lang; java.util; | 2,500,673 |
public FutureResult<Void> updateProjectModel(final Project project, final Collection<String> maqlDdl) {
notNull(project, "project");
notNull(project.getId(), "project.id");
noNullElements(maqlDdl, "maqlDdl");
if (maqlDdl.isEmpty()) {
throw new IllegalArgumentException("MAQL DDL string(s) should be given");
}
return new PollResult<>(this, new AbstractPollHandlerBase<MaqlDdlLinks, Void>(MaqlDdlLinks.class, Void.class) {
private final String projectId = project.getId();
private final LinkedList<String> maqlChunks = new LinkedList<>(maqlDdl);
private String pollUri;
{
executeNextMaqlChunk();
} | FutureResult<Void> function(final Project project, final Collection<String> maqlDdl) { notNull(project, STR); notNull(project.getId(), STR); noNullElements(maqlDdl, STR); if (maqlDdl.isEmpty()) { throw new IllegalArgumentException(STR); } return new PollResult<>(this, new AbstractPollHandlerBase<MaqlDdlLinks, Void>(MaqlDdlLinks.class, Void.class) { private final String projectId = project.getId(); private final LinkedList<String> maqlChunks = new LinkedList<>(maqlDdl); private String pollUri; { executeNextMaqlChunk(); } | /**
* Update project model with the given update script(s) (MAQL).
*
* @param project project to be updated
* @param maqlDdl update script to be executed in the project
* @return poll result
*
* @see com.gooddata.dataset.DatasetService#updateProjectData
*/ | Update project model with the given update script(s) (MAQL) | updateProjectModel | {
"repo_name": "standevgd/gooddata-java",
"path": "src/main/java/com/gooddata/model/ModelService.java",
"license": "bsd-3-clause",
"size": 7505
} | [
"com.gooddata.AbstractPollHandlerBase",
"com.gooddata.FutureResult",
"com.gooddata.PollResult",
"com.gooddata.project.Project",
"com.gooddata.util.Validate",
"java.util.Collection",
"java.util.LinkedList"
] | import com.gooddata.AbstractPollHandlerBase; import com.gooddata.FutureResult; import com.gooddata.PollResult; import com.gooddata.project.Project; import com.gooddata.util.Validate; import java.util.Collection; import java.util.LinkedList; | import com.gooddata.*; import com.gooddata.project.*; import com.gooddata.util.*; import java.util.*; | [
"com.gooddata",
"com.gooddata.project",
"com.gooddata.util",
"java.util"
] | com.gooddata; com.gooddata.project; com.gooddata.util; java.util; | 1,342,462 |
ReportDAO reportDAO = DAOFactory.getInstance().getReportDAO(); | ReportDAO reportDAO = DAOFactory.getInstance().getReportDAO(); | /**
* Processes the page request by including the corresponding JSP page to the response.
*
* @param pageRequestContext Page request context
*/ | Processes the page request by including the corresponding JSP page to the response | process | {
"repo_name": "leafsoftinfo/pyramus",
"path": "pyramus/src/main/java/fi/pyramus/views/reports/ListReportsViewController.java",
"license": "gpl-3.0",
"size": 2453
} | [
"fi.pyramus.dao.DAOFactory",
"fi.pyramus.dao.reports.ReportDAO"
] | import fi.pyramus.dao.DAOFactory; import fi.pyramus.dao.reports.ReportDAO; | import fi.pyramus.dao.*; import fi.pyramus.dao.reports.*; | [
"fi.pyramus.dao"
] | fi.pyramus.dao; | 2,076,285 |
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<GatewayRouteListResultInner> getAdvertisedRoutesAsync(
String resourceGroupName, String virtualNetworkGatewayName, String peer) {
return beginGetAdvertisedRoutesAsync(resourceGroupName, virtualNetworkGatewayName, peer)
.last()
.flatMap(this.client::getLroFinalResultOrError);
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<GatewayRouteListResultInner> function( String resourceGroupName, String virtualNetworkGatewayName, String peer) { return beginGetAdvertisedRoutesAsync(resourceGroupName, virtualNetworkGatewayName, peer) .last() .flatMap(this.client::getLroFinalResultOrError); } | /**
* This operation retrieves a list of routes the virtual network gateway is advertising to the specified peer.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkGatewayName The name of the virtual network gateway.
* @param peer The IP address of the peer.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of virtual network gateway routes.
*/ | This operation retrieves a list of routes the virtual network gateway is advertising to the specified peer | getAdvertisedRoutesAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/VirtualNetworkGatewaysClientImpl.java",
"license": "mit",
"size": 322151
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.network.fluent.models.GatewayRouteListResultInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.network.fluent.models.GatewayRouteListResultInner; | import com.azure.core.annotation.*; import com.azure.resourcemanager.network.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 2,797,380 |
public void setEmptyText(CharSequence emptyText) {
View emptyView = mListView.getEmptyView();
if (emptyView instanceof TextView) {
((TextView) emptyView).setText(emptyText);
}
} | void function(CharSequence emptyText) { View emptyView = mListView.getEmptyView(); if (emptyView instanceof TextView) { ((TextView) emptyView).setText(emptyText); } } | /**
* The default content for this Fragment has a TextView that is shown when
* the list is empty. If you would like to change the text, call this method
* to supply the text it should use.
*/ | The default content for this Fragment has a TextView that is shown when the list is empty. If you would like to change the text, call this method to supply the text it should use | setEmptyText | {
"repo_name": "erikchenmelbourne/UnwelcomeCaller",
"path": "Application/src/main/java/com/example/android/donebar/ItemFragment.java",
"license": "apache-2.0",
"size": 5035
} | [
"android.view.View",
"android.widget.TextView"
] | import android.view.View; import android.widget.TextView; | import android.view.*; import android.widget.*; | [
"android.view",
"android.widget"
] | android.view; android.widget; | 547,429 |
public void migrateFragmentInstance(int oldFragmentID, int newFragmentID) {
DbDataObject dbDataObject = new DbDataObject();
String update = "UPDATE fragmentinstance " +
"SET fragment_id = " + newFragmentID +
" WHERE fragment_id = " + oldFragmentID;
dbDataObject.executeUpdateStatement(update);
} | void function(int oldFragmentID, int newFragmentID) { DbDataObject dbDataObject = new DbDataObject(); String update = STR + STR + newFragmentID + STR + oldFragmentID; dbDataObject.executeUpdateStatement(update); } | /**
* Change all oldFragmentIDs to newFragmentIDs of all running instances
* with oldFragmentID in table "fragmentinstance".
*
* @param oldFragmentID fragmentID that gets updated by newFragmentId
* @param newFragmentID new fragmentID that overwrites oldFragmentId
*/ | Change all oldFragmentIDs to newFragmentIDs of all running instances with oldFragmentID in table "fragmentinstance" | migrateFragmentInstance | {
"repo_name": "BP2014W1/JEngine",
"path": "src/main/java/de/uni_potsdam/hpi/bpt/bp2014/jcomparser/Connector.java",
"license": "mit",
"size": 31380
} | [
"de.uni_potsdam.hpi.bpt.bp2014.database.DbDataObject"
] | import de.uni_potsdam.hpi.bpt.bp2014.database.DbDataObject; | import de.uni_potsdam.hpi.bpt.bp2014.database.*; | [
"de.uni_potsdam.hpi"
] | de.uni_potsdam.hpi; | 914,750 |
private void addPhone(int type, String data, String label, boolean isPrimary) {
if (mPhoneList == null) {
mPhoneList = new ArrayList<PhoneData>();
}
StringBuilder builder = new StringBuilder();
String trimed = data.trim();
int length = trimed.length();
for (int i = 0; i < length; i++) {
char ch = trimed.charAt(i);
if (('0' <= ch && ch <= '9') || (i == 0 && ch == '+')) {
builder.append(ch);
}
}
PhoneData phoneData = new PhoneData(type,
// modify by jhnie for USA style phone number, if format phone number,
// +8615810612253 ->861-58-061-2253, it's wrong.
// PhoneNumberUtils.formatNumber(builder.toString()),
builder.toString(), label, isPrimary);
mPhoneList.add(phoneData);
} | void function(int type, String data, String label, boolean isPrimary) { if (mPhoneList == null) { mPhoneList = new ArrayList<PhoneData>(); } StringBuilder builder = new StringBuilder(); String trimed = data.trim(); int length = trimed.length(); for (int i = 0; i < length; i++) { char ch = trimed.charAt(i); if (('0' <= ch && ch <= '9') (i == 0 && ch == '+')) { builder.append(ch); } } PhoneData phoneData = new PhoneData(type, builder.toString(), label, isPrimary); mPhoneList.add(phoneData); } | /**
* Add a phone info to phoneList.
*
* @param data
* phone number
* @param type
* type col of content://contacts/phones
* @param label
* lable col of content://contacts/phones
*/ | Add a phone info to phoneList | addPhone | {
"repo_name": "rex-xxx/mt6572_x201",
"path": "mediatek/packages/apps/iSMS/src/com/hissage/vcard/ContactStruct.java",
"license": "gpl-2.0",
"size": 66433
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 2,125,042 |
public int getSelectedStyle() {
if (this.bold.isSelected() && this.italic.isSelected()) {
return Font.BOLD + Font.ITALIC;
}
if (this.bold.isSelected()) {
return Font.BOLD;
}
if (this.italic.isSelected()) {
return Font.ITALIC;
}
else {
return Font.PLAIN;
}
} | int function() { if (this.bold.isSelected() && this.italic.isSelected()) { return Font.BOLD + Font.ITALIC; } if (this.bold.isSelected()) { return Font.BOLD; } if (this.italic.isSelected()) { return Font.ITALIC; } else { return Font.PLAIN; } } | /**
* Returns the selected style.
*
* @return the style.
*/ | Returns the selected style | getSelectedStyle | {
"repo_name": "akardapolov/ASH-Viewer",
"path": "jfreechart-fse/src/main/java/org/jfree/chart/ui/FontChooserPanel.java",
"license": "gpl-3.0",
"size": 7905
} | [
"java.awt.Font"
] | import java.awt.Font; | import java.awt.*; | [
"java.awt"
] | java.awt; | 2,039,970 |
public void status() throws Exception {
System.out.println("STATUS OF " + name.toUpperCase());
if (x != null) {
StringBuffer sb = new StringBuffer();
if (x.getAnonymityState()) {
sb.append("anon, ");
}
if (x.getConfState()) {
sb.append("conf, ");
}
if (x.getCredDelegState()) {
sb.append("deleg, ");
}
if (x.getIntegState()) {
sb.append("integ, ");
}
if (x.getMutualAuthState()) {
sb.append("mutual, ");
}
if (x.getReplayDetState()) {
sb.append("rep det, ");
}
if (x.getSequenceDetState()) {
sb.append("seq det, ");
}
System.out.println(" Context status of " + name + ": " + sb.toString());
if (x.isProtReady() || x.isEstablished()) {
System.out.println(" " + x.getSrcName() + " -> " + x.getTargName());
}
}
xstatus();
if (s != null) {
System.out.println("====== START SUBJECT CONTENT =====");
for (Principal p : s.getPrincipals()) {
System.out.println(" Principal: " + p);
}
for (Object o : s.getPublicCredentials()) {
System.out.println(" " + o.getClass());
System.out.println(" " + o);
}
System.out.println("====== Private Credentials Set ======");
for (Object o : s.getPrivateCredentials()) {
System.out.println(" " + o.getClass());
if (o instanceof KerberosTicket) {
KerberosTicket kt = (KerberosTicket) o;
System.out.println(" " + kt.getServer() + " for " + kt.getClient());
} else if (o instanceof KerberosKey) {
KerberosKey kk = (KerberosKey) o;
System.out.print(" " + kk.getKeyType() + " " + kk.getVersionNumber() + " " + kk.getAlgorithm() + " ");
for (byte b : kk.getEncoded()) {
System.out.printf("%02X", b & 0xff);
}
System.out.println();
} else if (o instanceof Map) {
Map map = (Map) o;
for (Object k : map.keySet()) {
System.out.println(" " + k + ": " + map.get(k));
}
} else {
System.out.println(" " + o);
}
}
System.out.println("====== END SUBJECT CONTENT =====");
}
} | void function() throws Exception { System.out.println(STR + name.toUpperCase()); if (x != null) { StringBuffer sb = new StringBuffer(); if (x.getAnonymityState()) { sb.append(STR); } if (x.getConfState()) { sb.append(STR); } if (x.getCredDelegState()) { sb.append(STR); } if (x.getIntegState()) { sb.append(STR); } if (x.getMutualAuthState()) { sb.append(STR); } if (x.getReplayDetState()) { sb.append(STR); } if (x.getSequenceDetState()) { sb.append(STR); } System.out.println(STR + name + STR + sb.toString()); if (x.isProtReady() x.isEstablished()) { System.out.println(" " + x.getSrcName() + STR + x.getTargName()); } } xstatus(); if (s != null) { System.out.println(STR); for (Principal p : s.getPrincipals()) { System.out.println(STR + p); } for (Object o : s.getPublicCredentials()) { System.out.println(" " + o.getClass()); System.out.println(" " + o); } System.out.println(STR); for (Object o : s.getPrivateCredentials()) { System.out.println(" " + o.getClass()); if (o instanceof KerberosTicket) { KerberosTicket kt = (KerberosTicket) o; System.out.println(" " + kt.getServer() + STR + kt.getClient()); } else if (o instanceof KerberosKey) { KerberosKey kk = (KerberosKey) o; System.out.print(" " + kk.getKeyType() + " " + kk.getVersionNumber() + " " + kk.getAlgorithm() + " "); for (byte b : kk.getEncoded()) { System.out.printf("%02X", b & 0xff); } System.out.println(); } else if (o instanceof Map) { Map map = (Map) o; for (Object k : map.keySet()) { System.out.println(" " + k + STR + map.get(k)); } } else { System.out.println(" " + o); } } System.out.println(STR); } } | /**
* Prints status of GSSContext and Subject
* @throws java.lang.Exception
*/ | Prints status of GSSContext and Subject | status | {
"repo_name": "md-5/jdk10",
"path": "test/jdk/sun/security/krb5/auto/Context.java",
"license": "gpl-2.0",
"size": 26566
} | [
"java.security.Principal",
"java.util.Map",
"javax.security.auth.kerberos.KerberosKey",
"javax.security.auth.kerberos.KerberosTicket"
] | import java.security.Principal; import java.util.Map; import javax.security.auth.kerberos.KerberosKey; import javax.security.auth.kerberos.KerberosTicket; | import java.security.*; import java.util.*; import javax.security.auth.kerberos.*; | [
"java.security",
"java.util",
"javax.security"
] | java.security; java.util; javax.security; | 482,257 |
public Postcard build(Uri url) {
return _ARouter.getInstance().build(url);
} | Postcard function(Uri url) { return _ARouter.getInstance().build(url); } | /**
* Build the roadmap, draw a postcard.
*
* @param url the path
*/ | Build the roadmap, draw a postcard | build | {
"repo_name": "leesocrates/remind",
"path": "arouter-api/src/main/java/com/alibaba/android/arouter/launcher/ARouter.java",
"license": "gpl-3.0",
"size": 4809
} | [
"android.net.Uri",
"com.alibaba.android.arouter.facade.Postcard"
] | import android.net.Uri; import com.alibaba.android.arouter.facade.Postcard; | import android.net.*; import com.alibaba.android.arouter.facade.*; | [
"android.net",
"com.alibaba.android"
] | android.net; com.alibaba.android; | 900,344 |
private void attachmentFolderFileChooserActionPerformed(ActionEvent evt) { // NOSONAR This method is used through lambda
File chosen = attachmentFileChooser.getSelectedFile();
if (chosen == null) {
return;
}
final String attachments = tfAttachment.getText().trim();
if (attachments.length() > 0) {
tfAttachment.setText(attachments
+ SmtpSampler.FILENAME_SEPARATOR
+ chosen.getAbsolutePath());
} else {
tfAttachment.setText(chosen.getAbsolutePath());
}
} | void function(ActionEvent evt) { File chosen = attachmentFileChooser.getSelectedFile(); if (chosen == null) { return; } final String attachments = tfAttachment.getText().trim(); if (attachments.length() > 0) { tfAttachment.setText(attachments + SmtpSampler.FILENAME_SEPARATOR + chosen.getAbsolutePath()); } else { tfAttachment.setText(chosen.getAbsolutePath()); } } | /**
* ActionPerformed-method for filechoser "attachmentFileChoser", creates
* FileChoser-Object
*
* @param evt
* ActionEvent to be handled
*/ | ActionPerformed-method for filechoser "attachmentFileChoser", creates FileChoser-Object | attachmentFolderFileChooserActionPerformed | {
"repo_name": "ra0077/jmeter",
"path": "src/protocol/mail/org/apache/jmeter/protocol/smtp/sampler/gui/SmtpPanel.java",
"license": "apache-2.0",
"size": 38588
} | [
"java.awt.event.ActionEvent",
"java.io.File",
"org.apache.jmeter.protocol.smtp.sampler.SmtpSampler"
] | import java.awt.event.ActionEvent; import java.io.File; import org.apache.jmeter.protocol.smtp.sampler.SmtpSampler; | import java.awt.event.*; import java.io.*; import org.apache.jmeter.protocol.smtp.sampler.*; | [
"java.awt",
"java.io",
"org.apache.jmeter"
] | java.awt; java.io; org.apache.jmeter; | 636,703 |
/**
 * Begins the long-running delete of the given Express Route Circuit
 * Connection, returning a poller over the operation's progress.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public PollerFlux<PollResult<Void>, Void> beginDeleteAsync(
    String resourceGroupName, String circuitName, String peeringName, String connectionName) {
    // Fire the delete request, then adapt the raw response publisher into an LRO poller.
    Mono<Response<Flux<ByteBuffer>>> activationResponse =
        deleteWithResponseAsync(resourceGroupName, circuitName, peeringName, connectionName);
    return this.client.<Void, Void>getLroResult(
        activationResponse, this.client.getHttpPipeline(), Void.class, Void.class, Context.NONE);
} | @ServiceMethod(returns = ReturnType.SINGLE) PollerFlux<PollResult<Void>, Void> function( String resourceGroupName, String circuitName, String peeringName, String connectionName) { Mono<Response<Flux<ByteBuffer>>> mono = deleteWithResponseAsync(resourceGroupName, circuitName, peeringName, connectionName); return this .client .<Void, Void>getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, Context.NONE); } | /**
* Deletes the specified Express Route Circuit Connection from the specified express route circuit.
*
* @param resourceGroupName The name of the resource group.
* @param circuitName The name of the express route circuit.
* @param peeringName The name of the peering.
* @param connectionName The name of the express route circuit connection.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/ | Deletes the specified Express Route Circuit Connection from the specified express route circuit | beginDeleteAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/ExpressRouteCircuitConnectionsClientImpl.java",
"license": "mit",
"size": 63543
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.core.management.polling.PollResult",
"com.azure.core.util.Context",
"com.azure.core.util.polling.PollerFlux",
"java.nio.ByteBuffer"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.management.polling.PollResult; import com.azure.core.util.Context; import com.azure.core.util.polling.PollerFlux; import java.nio.ByteBuffer; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.management.polling.*; import com.azure.core.util.*; import com.azure.core.util.polling.*; import java.nio.*; | [
"com.azure.core",
"java.nio"
] | com.azure.core; java.nio; | 1,946,887 |
/**
 * Opens the physical database connection, either from the connection pool or
 * directly through the configured driver class.
 *
 * @param partitionId the partition ID in the cluster to connect to
 * @throws KettleDatabaseException if no connection metadata is defined or if
 *         the connection attempt fails
 */
public void normalConnect( String partitionId ) throws KettleDatabaseException {
  if ( databaseMeta == null ) {
    throw new KettleDatabaseException( "No valid database connection defined!" );
  }
  try {
    // First see if we use connection pooling...
    // (pooling defaults to false for backward compatibility; JNDI access
    // does pooling on its own, so it is excluded here)
    if ( databaseMeta.isUsingConnectionPool() &&
      databaseMeta.getAccessType() != DatabaseMeta.TYPE_ACCESS_JNDI
    ) {
      try {
        this.connection = ConnectionPoolUtil.getConnection( log, databaseMeta, partitionId );
        // Align the pooled connection's auto-commit flag with this object's setting.
        if ( getConnection().getAutoCommit() != isAutoCommit() ) {
          setAutoCommit( isAutoCommit() );
        }
      } catch ( Exception e ) {
        throw new KettleDatabaseException( "Error occured while trying to connect to the database", e );
      }
    } else {
      // Direct (non-pooled) connection via the JDBC driver class.
      connectUsingClass( databaseMeta.getDriverClass(), partitionId );
      if ( log.isDetailed() ) {
        log.logDetailed( "Connected to database." );
      }
      // See if we need to execute an extra SQL statement at connect time...
      String sql = environmentSubstitute( databaseMeta.getConnectSQL() );
      // only execute if the SQL is not empty, null and is not just a bunch of
      // spaces, tabs, CR etc.
      if ( !Const.isEmpty( sql ) && !Const.onlySpaces( sql ) ) {
        execStatements( sql );
        if ( log.isDetailed() ) {
          log.logDetailed( "Executed connect time SQL statements:" + Const.CR + sql );
        }
      }
    }
  } catch ( Exception e ) {
    throw new KettleDatabaseException( "Error occured while trying to connect to the database", e );
  }
} | void function( String partitionId ) throws KettleDatabaseException { if ( databaseMeta == null ) { throw new KettleDatabaseException( STR ); } try { databaseMeta.getAccessType() != DatabaseMeta.TYPE_ACCESS_JNDI ) { try { this.connection = ConnectionPoolUtil.getConnection( log, databaseMeta, partitionId ); if ( getConnection().getAutoCommit() != isAutoCommit() ) { setAutoCommit( isAutoCommit() ); } } catch ( Exception e ) { throw new KettleDatabaseException( STR, e ); } } else { connectUsingClass( databaseMeta.getDriverClass(), partitionId ); if ( log.isDetailed() ) { log.logDetailed( STR ); } String sql = environmentSubstitute( databaseMeta.getConnectSQL() ); if ( !Const.isEmpty( sql ) && !Const.onlySpaces( sql ) ) { execStatements( sql ); if ( log.isDetailed() ) { log.logDetailed( STR + Const.CR + sql ); } } } } catch ( Exception e ) { throw new KettleDatabaseException( STR, e ); } } | /**
* Open the database connection.
*
* @param partitionId
* the partition ID in the cluster to connect to.
* @throws KettleDatabaseException
* if something went wrong.
*/ | Open the database connection | normalConnect | {
"repo_name": "andrei-viaryshka/pentaho-kettle",
"path": "core/src/org/pentaho/di/core/database/Database.java",
"license": "apache-2.0",
"size": 162372
} | [
"org.pentaho.di.core.Const",
"org.pentaho.di.core.exception.KettleDatabaseException"
] | import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleDatabaseException; | import org.pentaho.di.core.*; import org.pentaho.di.core.exception.*; | [
"org.pentaho.di"
] | org.pentaho.di; | 95,200 |
/**
 * Converts the object to a JSON-formatted string.
 *
 * @param src source object
 * @param classOfSrc the class of the object, used to pick the serialization strategy
 * @return JSON string
 * @throws Exception if serialization fails
 */
public static String toJson(Object src, Class<?> classOfSrc)
        throws Exception {
    if (isBasicType(classOfSrc)) {
        // Basic types (as decided by isBasicType elsewhere in this file)
        // are rendered via toString().
        return src.toString();
    } else if (List.class.isAssignableFrom(classOfSrc)) {
        JSONArray jsonArray = new JSONArray();
        List<?> list = (List<?>) src;
        for (Object object : list) {
            // NOTE(review): assumes each element serializes to a JSON object;
            // a basic-type element would make new JSONObject(...) fail — TODO confirm.
            jsonArray.put(new JSONObject(toJson(object)));
        }
        return jsonArray.toString();
    } else if (classOfSrc.isArray()) {
        JSONArray jsonArray = new JSONArray();
        Object[] objArray = (Object[]) src;
        for (Object object : objArray) {
            jsonArray.put(new JSONObject(toJson(object)));
        }
        return jsonArray.toString();
    } else {
        // Plain bean: delegate to the reflective object serializer.
        return object2Json(src);
    }
}
| static String function(Object src, Class<?> classOfSrc) throws Exception { if (isBasicType(classOfSrc)) { return src.toString(); } else if (List.class.isAssignableFrom(classOfSrc)) { JSONArray jsonArray = new JSONArray(); List<?> list = (List<?>) src; for (Object object : list) { jsonArray.put(new JSONObject(toJson(object))); } return jsonArray.toString(); } else if (classOfSrc.isArray()) { JSONArray jsonArray = new JSONArray(); Object[] objArray = (Object[]) src; for (Object object : objArray) { jsonArray.put(new JSONObject(toJson(object))); } return jsonArray.toString(); } else { return object2Json(src); } } | /**
* Convert the object to json string, this will return a string formatted by json.
*
* @param src source object
* @param classOfSrc the class of this object
* @return json string
* @throws Exception
*/ | Convert the object to json string, this will return a string formatted by json | toJson | {
"repo_name": "AndroidKnife/Utils",
"path": "json/src/main/java/com/hwangjr/utils/json/JSONResolver.java",
"license": "apache-2.0",
"size": 17019
} | [
"java.util.List",
"org.json.JSONArray",
"org.json.JSONObject"
] | import java.util.List; import org.json.JSONArray; import org.json.JSONObject; | import java.util.*; import org.json.*; | [
"java.util",
"org.json"
] | java.util; org.json; | 600,023 |
/**
 * Creates a map from field name to {@link Field} for the given class and all
 * of its superclasses, skipping transient and static fields. Every returned
 * field has been made accessible.
 *
 * @param cl the class to introspect; null yields an empty map
 * @return map of field name to accessible Field
 */
protected HashMap getFieldMap(Class cl) {
  HashMap<String, Field> fieldMap = new HashMap<String, Field>();
  for (; cl != null; cl = cl.getSuperclass()) {
    for (Field field : cl.getDeclaredFields()) {
      // Transient and static fields are not part of the serialized state.
      if (Modifier.isTransient(field.getModifiers())
          || Modifier.isStatic(field.getModifiers())) {
        continue;
      }
      // XXX: could parameterize the handler to only deal with public
      field.setAccessible(true);
      // NOTE(review): a superclass field with the same name overwrites the
      // subclass entry here (superclasses are visited later) — TODO confirm intended.
      fieldMap.put(field.getName(), field);
    }
  }
  return fieldMap;
} | HashMap function(Class cl) { HashMap fieldMap = new HashMap(); for (; cl != null; cl = cl.getSuperclass()) { Field[] fields = cl.getDeclaredFields(); for (int i = 0; i < fields.length; i++) { Field field = fields[i]; if (Modifier.isTransient(field.getModifiers()) Modifier.isStatic(field.getModifiers())) continue; field.setAccessible(true); fieldMap.put(field.getName(), field); } } return fieldMap; } | /**
 * Creates a map of the class's fields.
*/ | Creates a map of the classes fields | getFieldMap | {
"repo_name": "way-way/dubbo",
"path": "hessian-lite/src/main/java/com/alibaba/com/caucho/hessian/io/HessianSerializerInput.java",
"license": "apache-2.0",
"size": 5795
} | [
"java.lang.reflect.Field",
"java.lang.reflect.Modifier",
"java.util.HashMap"
] | import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.HashMap; | import java.lang.reflect.*; import java.util.*; | [
"java.lang",
"java.util"
] | java.lang; java.util; | 1,391,777 |
boolean haveStatus(PerunSession sess, Member member, Status status); | boolean haveStatus(PerunSession sess, Member member, Status status); | /**
* Return true if member have specified status.
*
* @param sess
* @param member
* @param status
* @return true if member have the specified status
* false otherwise
*/ | Return true if member have specified status | haveStatus | {
"repo_name": "ondrocks/perun",
"path": "perun-core/src/main/java/cz/metacentrum/perun/core/bl/MembersManagerBl.java",
"license": "bsd-2-clause",
"size": 49250
} | [
"cz.metacentrum.perun.core.api.Member",
"cz.metacentrum.perun.core.api.PerunSession",
"cz.metacentrum.perun.core.api.Status"
] | import cz.metacentrum.perun.core.api.Member; import cz.metacentrum.perun.core.api.PerunSession; import cz.metacentrum.perun.core.api.Status; | import cz.metacentrum.perun.core.api.*; | [
"cz.metacentrum.perun"
] | cz.metacentrum.perun; | 1,304,800 |
/**
 * Test12: a GET request to /TestDynamic must return HTTP 200, because GET
 * access is allowed for RoleC via ServletRegistration.Dynamic.
 */
@Test
public void test_TestDynamic_GET_RoleC_Sucess() throws Exception{
    // "gracie"/"biscuit" — presumably credentials of a RoleC user; verify against test setup.
    Assert.assertEquals(invoke("/TestDynamic", "GET", "gracie", "biscuit"), HttpURLConnection.HTTP_OK);
} | void function() throws Exception{ Assert.assertEquals(invoke(STR, "GET", STR, STR), HttpURLConnection.HTTP_OK); } | /**
* Test12
 * In ServletRegistration.Dynamic, GET access is allowed by RoleC
*/ | Test12 In ServletRegistration.Dynamic, GET access is allowled by RoleC | test_TestDynamic_GET_RoleC_Sucess | {
"repo_name": "apache/geronimo",
"path": "testsuite/javaee6-testsuite/servlet3.0-security-test/src/test/java/org/apache/geronimo/testsuite/servlets/ServletsTest.java",
"license": "apache-2.0",
"size": 9787
} | [
"java.net.HttpURLConnection",
"org.testng.Assert"
] | import java.net.HttpURLConnection; import org.testng.Assert; | import java.net.*; import org.testng.*; | [
"java.net",
"org.testng"
] | java.net; org.testng; | 2,479,207 |
/**
 * When none of the possible neighbors of the current direction is available,
 * the individual turns: for each starting direction, the resulting set of
 * directions must match the expected POSSIBLE_DIRECTIONS entry.
 */
@Test
public void newViewDirection() {
    // Make every neighbor lookup return null, i.e. no neighboring cell is available.
    context.checking(new Expectations() {
        {
            allowing(helper.getTestCell()).getNeighbor(with(any(Direction8.class)));
            will(returnValue(null));
        }
    });
    SimpleMovementRule2 rule = initRuleForDirectionTest();
    for (Direction8 current : POSSIBLE_DIRECTIONS.keySet()) {
        // Directions produced by the rule for this starting direction.
        Set<Direction8> s = get(rule, current);
        assertThat(s, containsInAnyOrder(POSSIBLE_DIRECTIONS.get(current)));
    }
} | void function() { context.checking(new Expectations() { { allowing(helper.getTestCell()).getNeighbor(with(any(Direction8.class))); will(returnValue(null)); } }); SimpleMovementRule2 rule = initRuleForDirectionTest(); for (Direction8 current : POSSIBLE_DIRECTIONS.keySet()) { Set<Direction8> s = get(rule, current); assertThat(s, containsInAnyOrder(POSSIBLE_DIRECTIONS.get(current))); } } | /**
* When no of the five possible neighbors of the current direction is available, individual
* turns randomly.
*/ | When no of the five possible neighbors of the current direction is available, individual turns randomly | newViewDirection | {
"repo_name": "zet-evacuation/evacuation-cellular-automaton",
"path": "src/test/java/org/zet/cellularautomaton/algorithm/rule/SimpleMovementRule2Test.java",
"license": "gpl-2.0",
"size": 28617
} | [
"java.util.Set",
"org.hamcrest.MatcherAssert",
"org.hamcrest.Matchers",
"org.jmock.Expectations",
"org.zetool.common.util.Direction8"
] | import java.util.Set; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.jmock.Expectations; import org.zetool.common.util.Direction8; | import java.util.*; import org.hamcrest.*; import org.jmock.*; import org.zetool.common.util.*; | [
"java.util",
"org.hamcrest",
"org.jmock",
"org.zetool.common"
] | java.util; org.hamcrest; org.jmock; org.zetool.common; | 697,956 |
/**
 * Writes an error-level ({@link Level#SEVERE}) message to the console logger.
 *
 * @param message the message, possibly containing {@code {0}}-style placeholders
 * @param objects the list of message parameters
 */
public static void error(String message, Object... objects) {
    CONSOLE_LOGGER.log(Level.SEVERE, message, objects);
}
| static void function(String message, Object... objects) { CONSOLE_LOGGER.log(Level.SEVERE, message, objects); } | /**
* Writes the error message to the console.
*
* @param message the message.
* @param objects the list of message parameters.
*/ | Writes the error message to the console | error | {
"repo_name": "lorislab/mechanic",
"path": "mechanic/src/main/java/org/lorislab/mechanic/logger/Console.java",
"license": "apache-2.0",
"size": 4789
} | [
"java.util.logging.Level"
] | import java.util.logging.Level; | import java.util.logging.*; | [
"java.util"
] | java.util; | 852,391 |
/**
 * Marks this invoice as uncollectible, which is useful for keeping track of
 * bad debts that can be written off for accounting purposes.
 */
public Invoice markUncollectible(Map<String, Object> params, RequestOptions options)
    throws StripeException {
  // Build the endpoint URL from the API base and this invoice's URL-encoded id.
  String encodedId = ApiResource.urlEncodeId(this.getId());
  String path = String.format("/v1/invoices/%s/mark_uncollectible", encodedId);
  String url = String.format("%s%s", Stripe.getApiBase(), path);
  return ApiResource.request(ApiResource.RequestMethod.POST, url, params, Invoice.class, options);
} | Invoice function(Map<String, Object> params, RequestOptions options) throws StripeException { String url = String.format( "%s%s", Stripe.getApiBase(), String.format( STR, ApiResource.urlEncodeId(this.getId()))); return ApiResource.request(ApiResource.RequestMethod.POST, url, params, Invoice.class, options); } | /**
* Marking an invoice as uncollectible is useful for keeping track of bad debts that can be
* written off for accounting purposes.
*/ | Marking an invoice as uncollectible is useful for keeping track of bad debts that can be written off for accounting purposes | markUncollectible | {
"repo_name": "stripe/stripe-java",
"path": "src/main/java/com/stripe/model/Invoice.java",
"license": "mit",
"size": 73384
} | [
"com.stripe.Stripe",
"com.stripe.exception.StripeException",
"com.stripe.net.ApiResource",
"com.stripe.net.RequestOptions",
"java.util.Map"
] | import com.stripe.Stripe; import com.stripe.exception.StripeException; import com.stripe.net.ApiResource; import com.stripe.net.RequestOptions; import java.util.Map; | import com.stripe.*; import com.stripe.exception.*; import com.stripe.net.*; import java.util.*; | [
"com.stripe",
"com.stripe.exception",
"com.stripe.net",
"java.util"
] | com.stripe; com.stripe.exception; com.stripe.net; java.util; | 1,360,626 |
/**
 * Creates a {@link DynamicStringListProperty} for the given key with no
 * default value.
 *
 * <p>Please only use the result as a static variable: calling
 * {@code getList(key).get()} repeatedly will probably cause a memory leak.
 *
 * @param key the configuration key
 * @return the dynamic string-list property
 */
public static DynamicStringListProperty getList(String key) {
    return new DynamicStringListProperty(key, (String) null);
} | static DynamicStringListProperty function(String key) { return new DynamicStringListProperty(key, (String) null); } | /**
* Please only use this as a static variable. Calling getList(..).get()
* repeatedly will probably cause a memory leak
*
* @param key
* @return
*/ | Please only use this as a static variable. Calling getList(..).get() repeatedly will probably cause a memory leak | getList | {
"repo_name": "hibooboo2/cattle",
"path": "code/framework/archaius/src/main/java/io/cattle/platform/archaius/util/ArchaiusUtil.java",
"license": "apache-2.0",
"size": 2637
} | [
"com.netflix.config.DynamicStringListProperty"
] | import com.netflix.config.DynamicStringListProperty; | import com.netflix.config.*; | [
"com.netflix.config"
] | com.netflix.config; | 1,816,960 |
/**
 * Returns {@code true} if the supplied candidate value is equal to the value
 * bound to the supplied {@link BindStatus}. Equality here differs from plain
 * Java equality: simple matches are tried first, then collection/map/array
 * membership, then an exhaustive editor-based comparison.
 */
public static boolean isSelected(BindStatus bindStatus, Object candidateValue) {
    if (bindStatus == null) {
        // No bind status at all: only a null candidate counts as selected.
        return (candidateValue == null);
    }
    // Check obvious equality matches with the candidate first,
    // both with the rendered value and with the original value.
    Object boundValue = bindStatus.getValue();
    if (ObjectUtils.nullSafeEquals(boundValue, candidateValue)) {
        return true;
    }
    Object actualValue = bindStatus.getActualValue();
    if (actualValue != null && actualValue != boundValue &&
            ObjectUtils.nullSafeEquals(actualValue, candidateValue)) {
        return true;
    }
    // Prefer the actual (raw) value for the comparisons below when available.
    if (actualValue != null) {
        boundValue = actualValue;
    }
    else if (boundValue == null) {
        return false;
    }
    // Non-null value but no obvious equality with the candidate value:
    // go into more exhaustive comparisons.
    boolean selected = false;
    if (boundValue.getClass().isArray()) {
        selected = collectionCompare(CollectionUtils.arrayToList(boundValue), candidateValue, bindStatus);
    }
    else if (boundValue instanceof Collection) {
        selected = collectionCompare((Collection) boundValue, candidateValue, bindStatus);
    }
    else if (boundValue instanceof Map) {
        selected = mapCompare((Map) boundValue, candidateValue, bindStatus);
    }
    if (!selected) {
        // Last resort: exhaustive comparison via the registered property editor.
        selected = exhaustiveCompare(boundValue, candidateValue, bindStatus.getEditor(), null);
    }
    return selected;
} | static boolean function(BindStatus bindStatus, Object candidateValue) { if (bindStatus == null) { return (candidateValue == null); } Object boundValue = bindStatus.getValue(); if (ObjectUtils.nullSafeEquals(boundValue, candidateValue)) { return true; } Object actualValue = bindStatus.getActualValue(); if (actualValue != null && actualValue != boundValue && ObjectUtils.nullSafeEquals(actualValue, candidateValue)) { return true; } if (actualValue != null) { boundValue = actualValue; } else if (boundValue == null) { return false; } boolean selected = false; if (boundValue.getClass().isArray()) { selected = collectionCompare(CollectionUtils.arrayToList(boundValue), candidateValue, bindStatus); } else if (boundValue instanceof Collection) { selected = collectionCompare((Collection) boundValue, candidateValue, bindStatus); } else if (boundValue instanceof Map) { selected = mapCompare((Map) boundValue, candidateValue, bindStatus); } if (!selected) { selected = exhaustiveCompare(boundValue, candidateValue, bindStatus.getEditor(), null); } return selected; } | /**
* Returns <code>true</code> if the supplied candidate value is equal to the value bound to
* the supplied {@link BindStatus}. Equality in this case differs from standard Java equality and
* is described in more detail <a href="#equality-contract">here</a>.
*/ | Returns <code>true</code> if the supplied candidate value is equal to the value bound to the supplied <code>BindStatus</code>. Equality in this case differs from standard Java equality and is described in more detail here | isSelected | {
"repo_name": "cbeams-archive/spring-framework-2.5.x",
"path": "src/org/springframework/web/servlet/tags/form/SelectedValueComparator.java",
"license": "apache-2.0",
"size": 8089
} | [
"java.util.Collection",
"java.util.Map",
"org.springframework.util.CollectionUtils",
"org.springframework.util.ObjectUtils",
"org.springframework.web.servlet.support.BindStatus"
] | import java.util.Collection; import java.util.Map; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; import org.springframework.web.servlet.support.BindStatus; | import java.util.*; import org.springframework.util.*; import org.springframework.web.servlet.support.*; | [
"java.util",
"org.springframework.util",
"org.springframework.web"
] | java.util; org.springframework.util; org.springframework.web; | 2,084,481 |
void setKey(SecretKey secretKey); | void setKey(SecretKey secretKey); | /**
* Arbitrarily set the secret key to a specific value without removing any stored values. This is primarily
* designed for {@link com.bottlerocketstudios.vault.keys.storage.MemoryOnlyKeyStorage} and typical
* usage would be through the {@link #rekeyStorage(SecretKey)} method.
* <strong>If this key is not the right key, existing data may become permanently unreadable.</strong>
*/ | Arbitrarily set the secret key to a specific value without removing any stored values. This is primarily designed for <code>com.bottlerocketstudios.vault.keys.storage.MemoryOnlyKeyStorage</code> and typical usage would be through the <code>#rekeyStorage(SecretKey)</code> method. If this key is not the right key, existing data may become permanently unreadable | setKey | {
"repo_name": "BottleRocketStudios/Android-Vault",
"path": "AndroidVault/vault/src/main/java/com/bottlerocketstudios/vault/SharedPreferenceVault.java",
"license": "apache-2.0",
"size": 2424
} | [
"javax.crypto.SecretKey"
] | import javax.crypto.SecretKey; | import javax.crypto.*; | [
"javax.crypto"
] | javax.crypto; | 1,339,682 |
/**
 * Performs a scan within the specified base packages.
 * <p>Note that {@code refresh()} must be called afterwards in order for the
 * context to fully process the newly registered classes.
 *
 * @param basePackages the packages to check for annotated classes; must not be empty
 */
public void scan(String... basePackages) {
    Assert.notEmpty(basePackages, "At least one base package must be specified");
    this.scanner.scan(basePackages);
} | void function(String... basePackages) { Assert.notEmpty(basePackages, STR); this.scanner.scan(basePackages); } | /**
* Perform a scan within the specified base packages.
* <p>Note that {@link #refresh()} must be called in order for the context
* to fully process the new classes.
* @param basePackages the packages to check for annotated classes
* @see #register(Class...)
* @see #refresh()
*/ | Perform a scan within the specified base packages. Note that <code>#refresh()</code> must be called in order for the context to fully process the new classes | scan | {
"repo_name": "lamsfoundation/lams",
"path": "3rdParty_sources/spring/org/springframework/context/annotation/AnnotationConfigApplicationContext.java",
"license": "gpl-2.0",
"size": 6773
} | [
"org.springframework.util.Assert"
] | import org.springframework.util.Assert; | import org.springframework.util.*; | [
"org.springframework.util"
] | org.springframework.util; | 532,371 |
/**
 * Provides the PortType for a given operation, or null if the current WSDL
 * location has no entry for it.
 *
 * @param operationName the name of the operation the PortType is required for
 * @return the PortType, or null when not found
 */
public PortType getPortType(String operationName) {
    // Look up the operation-name -> PortType map for the current WSDL document.
    Map<String, PortType> operationsForLocation = portTypeMap.get(getWSDLLocation());
    if (operationsForLocation == null) {
        return null;
    }
    return operationsForLocation.get(operationName);
} | PortType function(String operationName) { PortType result = null; Map<String, PortType> portToOpMap = portTypeMap.get(getWSDLLocation()); if (portToOpMap != null) { result = portToOpMap.get(operationName); } return result; } | /**
* Provides the PortType for a given operation.
*
* @param operationName
* the name of the operation the PortType is required for.
* @return the PortType
*/ | Provides the PortType for a given operation | getPortType | {
"repo_name": "NCIP/taverna-grid",
"path": "cagrid-wsdl-generic/src/main/java/net/sf/taverna/cagrid/wsdl/parser/WSDLParser.java",
"license": "bsd-3-clause",
"size": 28694
} | [
"java.util.Map",
"javax.wsdl.PortType"
] | import java.util.Map; import javax.wsdl.PortType; | import java.util.*; import javax.wsdl.*; | [
"java.util",
"javax.wsdl"
] | java.util; javax.wsdl; | 1,915,969 |
/**
 * Writes the data to the given stream as CSV.
 *
 * @param out output stream receiving the CSV data
 * @param separator the separator character to use between values
 * @throws IOException if writing fails
 */
public void save(final OutputStream out, final char separator) throws IOException {
    checkRegistry();
    // Stream all rows through a CSV writer configured with the given separator.
    new CSVDataOutput(out, separator).write(iterator());
}
| void function(final OutputStream out, final char separator) throws IOException { checkRegistry(); final CSVDataOutput output = new CSVDataOutput(out, separator); output.write(iterator()); } | /**
* Writes the data to a CSV file.
*
* @param out Output stream
* @param separator The utilized separator character
* @throws IOException Signals that an I/O exception has occurred.
*/ | Writes the data to a CSV file | save | {
"repo_name": "RaffaelBild/arx",
"path": "src/main/org/deidentifier/arx/DataHandle.java",
"license": "apache-2.0",
"size": 36032
} | [
"java.io.IOException",
"java.io.OutputStream",
"org.deidentifier.arx.io.CSVDataOutput"
] | import java.io.IOException; import java.io.OutputStream; import org.deidentifier.arx.io.CSVDataOutput; | import java.io.*; import org.deidentifier.arx.io.*; | [
"java.io",
"org.deidentifier.arx"
] | java.io; org.deidentifier.arx; | 2,255,110 |
/**
 * Returns the value of the database column player_wuxing_card.create_dt.
 * (Generated by MyBatis Generator.)
 *
 * @return the value of player_wuxing_card.create_dt
 */
public Date getCreateDt()
{
    return createDt;
} | Date function() { return createDt; } | /**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column player_wuxing_card.create_dt
*
* @return the value of player_wuxing_card.create_dt
*
* @mbggenerated Thu Jun 27 17:09:25 CST 2013
*/ | This method was generated by MyBatis Generator. This method returns the value of the database column player_wuxing_card.create_dt | getCreateDt | {
"repo_name": "teaey/test-load",
"path": "src/main/java/cn/teaey/test/load/entity/PlayerWuxingCard.java",
"license": "apache-2.0",
"size": 23311
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 2,497,625 |
/**
 * Wraps a {@link DatasetGraph} to make a dataset.
 *
 * @param dataset DatasetGraph; must not be null
 * @return Dataset
 */
public static Dataset wrap(DatasetGraph dataset) {
    // requireNonNull returns its argument, so validation and delegation can be combined.
    return DatasetImpl.wrap(
        Objects.requireNonNull(dataset, "Can't wrap a null DatasetGraph reference"));
} | static Dataset function(DatasetGraph dataset) { Objects.requireNonNull(dataset, STR) ; return DatasetImpl.wrap(dataset); } | /**
* Wrap a {@link DatasetGraph} to make a dataset
*
* @param dataset DatasetGraph
* @return Dataset
*/ | Wrap a <code>DatasetGraph</code> to make a dataset | wrap | {
"repo_name": "apache/jena",
"path": "jena-arq/src/main/java/org/apache/jena/query/DatasetFactory.java",
"license": "apache-2.0",
"size": 9766
} | [
"org.apache.jena.sparql.core.DatasetGraph",
"org.apache.jena.sparql.core.DatasetImpl"
] | import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.DatasetImpl; | import org.apache.jena.sparql.core.*; | [
"org.apache.jena"
] | org.apache.jena; | 1,300,790 |
/**
 * Wrapper around {@code FileHandle#list(String)} that also sorts the result
 * with DEFAULT_COMPARATOR.
 */
public static FileHandle[] list(FileHandle folder, String suffix) {
    // Delegate the listing, then sort for a deterministic order.
    final FileHandle[] entries = folder.list(suffix);
    Arrays.sort(entries, DEFAULT_COMPARATOR);
    return entries;
} | static FileHandle[] function(FileHandle folder, String suffix) { FileHandle[] files = folder.list(suffix); Arrays.sort(files, DEFAULT_COMPARATOR); return files; } | /**
* Wrapper to {@link FileHandle#list(String)} which also sorts the result.
*/ | Wrapper to <code>FileHandle#list(String)</code> which also sorts the result | list | {
"repo_name": "code-disaster/libgdx-snippets",
"path": "src/main/java/com/badlogic/gdx/files/FileUtils.java",
"license": "mit",
"size": 5237
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 1,237,980 |
/**
 * Creates the sub tool bar manager for the given parent.
 * Same implementation as in SubActionBars; the EditorActionBars
 * implementation simply returns null.
 */
@Override
protected SubToolBarManager createSubToolBarManager(IToolBarManager parent) {
    return new SubToolBarManager(parent);
} | SubToolBarManager function(IToolBarManager parent) { return new SubToolBarManager(parent); } | /**
* Same implementation as in SubActionBars. The EditorActionBars
* implementation simply returns null.
*/ | Same implementation as in SubActionBars. The EditorActionBars implementation simply returns null | createSubToolBarManager | {
"repo_name": "nwnpallewela/developer-studio",
"path": "bps/plugins/org.eclipse.bpel.common.ui/src/org/eclipse/bpel/common/ui/composite/CompositeEditorActionBars.java",
"license": "apache-2.0",
"size": 4270
} | [
"org.eclipse.jface.action.IToolBarManager",
"org.eclipse.jface.action.SubToolBarManager"
] | import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.action.SubToolBarManager; | import org.eclipse.jface.action.*; | [
"org.eclipse.jface"
] | org.eclipse.jface; | 2,459,226 |
/**
 * Verifies audit log entries for successful ACL API calls (getAclStatus,
 * setAcl, removeAcl, removeAclEntries, removeDefaultAcl) and for the FS
 * shell "-getfacl" command, asserting the exact cumulative audit counts and
 * that no unsuccessful events are logged.
 */
@Test (timeout = 60000)
public void testAuditLogForAcls() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  conf.setBoolean(DFS_NAMENODE_ACLS_ENABLED_KEY, true);
  conf.set(DFS_NAMENODE_AUDIT_LOGGERS_KEY,
      DummyAuditLogger.class.getName());
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    cluster.waitClusterUp();
    assertTrue(DummyAuditLogger.initialized);
    final FileSystem fs = cluster.getFileSystem();
    final Path p = new Path("/debug.log");
    DFSTestUtil.createFile(fs, p, 1024, (short)1, 0L);
    // Start counting audit events only after the test file exists.
    DummyAuditLogger.resetLogCount();
    fs.getAclStatus(p);
    assertEquals(1, DummyAuditLogger.logCount);
    // FS shell command '-getfacl' additionally calls getFileInfo() and then
    // followed by getAclStatus() only if the ACL bit is set. Since the
    // initial permission didn't have the ACL bit set, getAclStatus() is
    // skipped.
    DFSTestUtil.FsShellRun("-getfacl " + p.toUri().getPath(), 0, null, conf);
    assertEquals(2, DummyAuditLogger.logCount);
    final List<AclEntry> acls = Lists.newArrayList();
    acls.add(AclTestHelpers.aclEntry(ACCESS, USER, ALL));
    acls.add(AclTestHelpers.aclEntry(ACCESS, USER, "user1", ALL));
    acls.add(AclTestHelpers.aclEntry(ACCESS, GROUP, READ_EXECUTE));
    acls.add(AclTestHelpers.aclEntry(ACCESS, OTHER, EXECUTE));
    fs.setAcl(p, acls);
    assertEquals(3, DummyAuditLogger.logCount);
    // Since the file has ACL bit set, FS shell command '-getfacl' should now
    // call getAclStatus() additionally after getFileInfo().
    DFSTestUtil.FsShellRun("-getfacl " + p.toUri().getPath(), 0, null, conf);
    assertEquals(5, DummyAuditLogger.logCount);
    fs.removeAcl(p);
    assertEquals(6, DummyAuditLogger.logCount);
    List<AclEntry> aclsToRemove = Lists.newArrayList();
    aclsToRemove.add(AclTestHelpers.aclEntry(DEFAULT, USER, "user1", ALL));
    fs.removeAclEntries(p, aclsToRemove);
    fs.removeDefaultAcl(p);
    // removeAclEntries and removeDefaultAcl each log one more event.
    assertEquals(8, DummyAuditLogger.logCount);
    // User ACL has been removed, FS shell command '-getfacl' should now
    // skip call to getAclStatus() after getFileInfo().
    DFSTestUtil.FsShellRun("-getfacl " + p.toUri().getPath(), 0, null, conf);
    assertEquals(9, DummyAuditLogger.logCount);
    assertEquals(0, DummyAuditLogger.unsuccessfulCount);
  } finally {
    cluster.shutdown();
  }
} | @Test (timeout = 60000) void function() throws Exception { final Configuration conf = new HdfsConfiguration(); conf.setBoolean(DFS_NAMENODE_ACLS_ENABLED_KEY, true); conf.set(DFS_NAMENODE_AUDIT_LOGGERS_KEY, DummyAuditLogger.class.getName()); final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build(); try { cluster.waitClusterUp(); assertTrue(DummyAuditLogger.initialized); final FileSystem fs = cluster.getFileSystem(); final Path p = new Path(STR); DFSTestUtil.createFile(fs, p, 1024, (short)1, 0L); DummyAuditLogger.resetLogCount(); fs.getAclStatus(p); assertEquals(1, DummyAuditLogger.logCount); DFSTestUtil.FsShellRun(STR + p.toUri().getPath(), 0, null, conf); assertEquals(2, DummyAuditLogger.logCount); final List<AclEntry> acls = Lists.newArrayList(); acls.add(AclTestHelpers.aclEntry(ACCESS, USER, ALL)); acls.add(AclTestHelpers.aclEntry(ACCESS, USER, "user1", ALL)); acls.add(AclTestHelpers.aclEntry(ACCESS, GROUP, READ_EXECUTE)); acls.add(AclTestHelpers.aclEntry(ACCESS, OTHER, EXECUTE)); fs.setAcl(p, acls); assertEquals(3, DummyAuditLogger.logCount); DFSTestUtil.FsShellRun(STR + p.toUri().getPath(), 0, null, conf); assertEquals(5, DummyAuditLogger.logCount); fs.removeAcl(p); assertEquals(6, DummyAuditLogger.logCount); List<AclEntry> aclsToRemove = Lists.newArrayList(); aclsToRemove.add(AclTestHelpers.aclEntry(DEFAULT, USER, "user1", ALL)); fs.removeAclEntries(p, aclsToRemove); fs.removeDefaultAcl(p); assertEquals(8, DummyAuditLogger.logCount); DFSTestUtil.FsShellRun(STR + p.toUri().getPath(), 0, null, conf); assertEquals(9, DummyAuditLogger.logCount); assertEquals(0, DummyAuditLogger.unsuccessfulCount); } finally { cluster.shutdown(); } } | /**
* Verify Audit log entries for the successful ACL API calls and ACL commands
* over FS Shell.
*/ | Verify Audit log entries for the successful ACL API calls and ACL commands over FS Shell | testAuditLogForAcls | {
"repo_name": "JingchengDu/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java",
"license": "apache-2.0",
"size": 25231
} | [
"java.util.List",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.fs.permission.AclEntry",
"org.apache.hadoop.hdfs.DFSTestUtil",
"org.apache.hadoop.hdfs.HdfsConfiguration",
"org.apache.hadoop.hdfs.MiniDFSCluster",
"org.apache.hadoop.util.Lists",
"org.junit.Assert",
"org.junit.Test"
] | import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.util.Lists; import org.junit.Assert; import org.junit.Test; | import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.permission.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.util.*; import org.junit.*; | [
"java.util",
"org.apache.hadoop",
"org.junit"
] | java.util; org.apache.hadoop; org.junit; | 2,569,625 |
public static DopplerCentroidCoefficientList[] getDopplerCentroidCoefficients(final MetadataElement absRoot) {
final MetadataElement elemRoot = absRoot.getElement(dop_coefficients);
final MetadataElement[] dop_coef_listElem = elemRoot.getElements();
final DopplerCentroidCoefficientList[] dopCoefficientList = new DopplerCentroidCoefficientList[dop_coef_listElem.length];
int k = 0;
for (MetadataElement listElem : dop_coef_listElem) {
final DopplerCentroidCoefficientList dopList = new DopplerCentroidCoefficientList();
dopList.time = listElem.getAttributeUTC(srgr_coef_time);
dopList.timeMJD = dopList.time.getMJD();
dopList.slant_range_time = listElem.getAttributeDouble(slant_range_time, 0.0);
final int numSubElems = listElem.getNumElements();
dopList.coefficients = new double[numSubElems];
for (int i = 0; i < numSubElems; ++i) {
final MetadataElement coefElem = listElem.getElementAt(i);
dopList.coefficients[i] = coefElem.getAttributeDouble(dop_coef, 0.0);
}
dopCoefficientList[k++] = dopList;
}
return dopCoefficientList;
} | static DopplerCentroidCoefficientList[] function(final MetadataElement absRoot) { final MetadataElement elemRoot = absRoot.getElement(dop_coefficients); final MetadataElement[] dop_coef_listElem = elemRoot.getElements(); final DopplerCentroidCoefficientList[] dopCoefficientList = new DopplerCentroidCoefficientList[dop_coef_listElem.length]; int k = 0; for (MetadataElement listElem : dop_coef_listElem) { final DopplerCentroidCoefficientList dopList = new DopplerCentroidCoefficientList(); dopList.time = listElem.getAttributeUTC(srgr_coef_time); dopList.timeMJD = dopList.time.getMJD(); dopList.slant_range_time = listElem.getAttributeDouble(slant_range_time, 0.0); final int numSubElems = listElem.getNumElements(); dopList.coefficients = new double[numSubElems]; for (int i = 0; i < numSubElems; ++i) { final MetadataElement coefElem = listElem.getElementAt(i); dopList.coefficients[i] = coefElem.getAttributeDouble(dop_coef, 0.0); } dopCoefficientList[k++] = dopList; } return dopCoefficientList; } | /**
* Get Doppler Centroid Coefficients.
*
* @param absRoot Abstracted metadata root.
* @return Array of Doppler centroid coefficient data sets.
*/ | Get Doppler Centroid Coefficients | getDopplerCentroidCoefficients | {
"repo_name": "valgur/snap-engine",
"path": "snap-engine-utilities/src/main/java/org/esa/snap/datamodel/AbstractMetadata.java",
"license": "gpl-3.0",
"size": 45621
} | [
"org.esa.snap.framework.datamodel.MetadataElement"
] | import org.esa.snap.framework.datamodel.MetadataElement; | import org.esa.snap.framework.datamodel.*; | [
"org.esa.snap"
] | org.esa.snap; | 1,141,197 |