comment
stringlengths 16
8.84k
| method_body
stringlengths 37
239k
| target_code
stringlengths 0
242
| method_body_after
stringlengths 29
239k
| context_before
stringlengths 14
424k
| context_after
stringlengths 14
284k
|
---|---|---|---|---|---|
we do, however there's a way to override it globally https://github.com/Azure/azure-sdk-for-java/blob/ad3586aef64218360521d72848f2befe6b7f5971/sdk/storage/azure-storage-common/src/main/java/com/azure/storage/common/implementation/Constants.java#L88-L91 . | private String stringToSign(String canonicalName) {
String versionSegment = this.snapshotId == null ? this.versionId : this.snapshotId;
return String.join("\n",
this.permissions == null ? "" : permissions,
this.startTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.startTime),
this.expiryTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.expiryTime),
canonicalName,
this.identifier == null ? "" : this.identifier,
this.sasIpRange == null ? "" : this.sasIpRange.toString(),
this.protocol == null ? "" : this.protocol.toString(),
VERSION,
resource,
versionSegment == null ? "" : versionSegment,
this.encryptionScope == null ? "" : this.encryptionScope,
this.cacheControl == null ? "" : this.cacheControl,
this.contentDisposition == null ? "" : this.contentDisposition,
this.contentEncoding == null ? "" : this.contentEncoding,
this.contentLanguage == null ? "" : this.contentLanguage,
this.contentType == null ? "" : this.contentType
);
} | this.encryptionScope == null ? "" : this.encryptionScope, | private String stringToSign(String canonicalName) {
String versionSegment = this.snapshotId == null ? this.versionId : this.snapshotId;
if (VERSION.compareTo(BlobServiceVersion.V2020_10_02.getVersion()) <= 0) {
return String.join("\n",
this.permissions == null ? "" : permissions,
this.startTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.startTime),
this.expiryTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.expiryTime),
canonicalName,
this.identifier == null ? "" : this.identifier,
this.sasIpRange == null ? "" : this.sasIpRange.toString(),
this.protocol == null ? "" : this.protocol.toString(),
VERSION,
resource,
versionSegment == null ? "" : versionSegment,
this.cacheControl == null ? "" : this.cacheControl,
this.contentDisposition == null ? "" : this.contentDisposition,
this.contentEncoding == null ? "" : this.contentEncoding,
this.contentLanguage == null ? "" : this.contentLanguage,
this.contentType == null ? "" : this.contentType
);
} else {
return String.join("\n",
this.permissions == null ? "" : permissions,
this.startTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.startTime),
this.expiryTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.expiryTime),
canonicalName,
this.identifier == null ? "" : this.identifier,
this.sasIpRange == null ? "" : this.sasIpRange.toString(),
this.protocol == null ? "" : this.protocol.toString(),
VERSION,
resource,
versionSegment == null ? "" : versionSegment,
this.encryptionScope == null ? "" : this.encryptionScope,
this.cacheControl == null ? "" : this.cacheControl,
this.contentDisposition == null ? "" : this.contentDisposition,
this.contentEncoding == null ? "" : this.contentEncoding,
this.contentLanguage == null ? "" : this.contentLanguage,
this.contentType == null ? "" : this.contentType
);
}
} | class BlobSasImplUtil {
/**
* The SAS blob constant.
*/
private static final String SAS_BLOB_CONSTANT = "b";
/**
* The SAS blob snapshot constant.
*/
private static final String SAS_BLOB_SNAPSHOT_CONSTANT = "bs";
/**
* The SAS blob version constant.
*/
private static final String SAS_BLOB_VERSION_CONSTANT = "bv";
/**
* The SAS blob container constant.
*/
private static final String SAS_CONTAINER_CONSTANT = "c";
private static final ClientLogger LOGGER = new ClientLogger(BlobSasImplUtil.class);
private static final String VERSION = Configuration.getGlobalConfiguration()
.get(Constants.PROPERTY_AZURE_STORAGE_SAS_SERVICE_VERSION, BlobServiceVersion.getLatest().getVersion());
private SasProtocol protocol;
private OffsetDateTime startTime;
private OffsetDateTime expiryTime;
private String permissions;
private SasIpRange sasIpRange;
private String containerName;
private String blobName;
private String resource;
private String snapshotId;
private String versionId;
private String identifier;
private String cacheControl;
private String contentDisposition;
private String contentEncoding;
private String contentLanguage;
private String contentType;
private String authorizedAadObjectId;
private String correlationId;
private String encryptionScope;
/**
* Creates a new {@link BlobSasImplUtil} with the specified parameters
*
* @param sasValues {@link BlobServiceSasSignatureValues}
* @param containerName The container name
*/
public BlobSasImplUtil(BlobServiceSasSignatureValues sasValues, String containerName) {
this(sasValues, containerName, null, null, null);
}
/**
* Creates a new {@link BlobSasImplUtil} with the specified parameters
*
* @param sasValues {@link BlobServiceSasSignatureValues}
* @param containerName The container name
* @param blobName The blob name
* @param snapshotId The snapshot id
* @param versionId The version id
*/
public BlobSasImplUtil(BlobServiceSasSignatureValues sasValues, String containerName, String blobName,
String snapshotId, String versionId) {
this(sasValues, containerName, blobName, snapshotId, versionId, null);
}
/**
* Creates a new {@link BlobSasImplUtil} with the specified parameters
*
* @param sasValues {@link BlobServiceSasSignatureValues}
* @param containerName The container name
* @param blobName The blob name
* @param snapshotId The snapshot id
* @param versionId The version id
* @param encryptionScope The encryption scope
*/
public BlobSasImplUtil(BlobServiceSasSignatureValues sasValues, String containerName, String blobName,
String snapshotId, String versionId, String encryptionScope) {
Objects.requireNonNull(sasValues);
if (snapshotId != null && versionId != null) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("'snapshot' and 'versionId' cannot be used at the same time."));
}
this.protocol = sasValues.getProtocol();
this.startTime = sasValues.getStartTime();
this.expiryTime = sasValues.getExpiryTime();
this.permissions = sasValues.getPermissions();
this.sasIpRange = sasValues.getSasIpRange();
this.containerName = containerName;
this.blobName = blobName;
this.snapshotId = snapshotId;
this.versionId = versionId;
this.identifier = sasValues.getIdentifier();
this.cacheControl = sasValues.getCacheControl();
this.contentDisposition = sasValues.getContentDisposition();
this.contentEncoding = sasValues.getContentEncoding();
this.contentLanguage = sasValues.getContentLanguage();
this.contentType = sasValues.getContentType();
this.authorizedAadObjectId = sasValues.getPreauthorizedAgentObjectId();
this.correlationId = sasValues.getCorrelationId();
/*
Prefer the encryption scope explicitly set on the sas values. If none present, fallback to the value on the
client.
*/
this.encryptionScope = sasValues.getEncryptionScope() == null
? encryptionScope : sasValues.getEncryptionScope();
}
/**
* Generates a Sas signed with a {@link StorageSharedKeyCredential}
*
* @param storageSharedKeyCredentials {@link StorageSharedKeyCredential}
* @param context Additional context that is passed through the code when generating a SAS.
* @return A String representing the Sas
*/
public String generateSas(StorageSharedKeyCredential storageSharedKeyCredentials, Context context) {
StorageImplUtils.assertNotNull("storageSharedKeyCredentials", storageSharedKeyCredentials);
ensureState();
final String canonicalName = getCanonicalName(storageSharedKeyCredentials.getAccountName());
final String stringToSign = stringToSign(canonicalName);
StorageImplUtils.logStringToSign(LOGGER, stringToSign, context);
final String signature = storageSharedKeyCredentials.computeHmac256(stringToSign);
return encode(null /* userDelegationKey */, signature);
}
/**
* Generates a Sas signed with a {@link UserDelegationKey}
*
* @param delegationKey {@link UserDelegationKey}
* @param accountName The account name
* @param context Additional context that is passed through the code when generating a SAS.
* @return A String representing the Sas
*/
public String generateUserDelegationSas(UserDelegationKey delegationKey, String accountName, Context context) {
StorageImplUtils.assertNotNull("delegationKey", delegationKey);
StorageImplUtils.assertNotNull("accountName", accountName);
ensureState();
final String canonicalName = getCanonicalName(accountName);
final String stringToSign = stringToSign(delegationKey, canonicalName);
StorageImplUtils.logStringToSign(LOGGER, stringToSign, context);
String signature = StorageImplUtils.computeHMac256(delegationKey.getValue(), stringToSign);
return encode(delegationKey, signature);
}
/**
* Encodes a Sas from the values in this type.
* @param userDelegationKey {@link UserDelegationKey}
* @param signature The signature of the Sas.
* @return A String representing the Sas.
*/
private String encode(UserDelegationKey userDelegationKey, String signature) {
/*
We should be url-encoding each key and each value, but because we know all the keys and values will encode to
themselves, we cheat except for the signature value.
*/
StringBuilder sb = new StringBuilder();
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SERVICE_VERSION, VERSION);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_PROTOCOL, this.protocol);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_START_TIME, formatQueryParameterDate(this.startTime));
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_EXPIRY_TIME, formatQueryParameterDate(this.expiryTime));
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_IP_RANGE, this.sasIpRange);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_IDENTIFIER, this.identifier);
if (userDelegationKey != null) {
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_OBJECT_ID,
userDelegationKey.getSignedObjectId());
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_TENANT_ID,
userDelegationKey.getSignedTenantId());
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_KEY_START,
formatQueryParameterDate(userDelegationKey.getSignedStart()));
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_KEY_EXPIRY,
formatQueryParameterDate(userDelegationKey.getSignedExpiry()));
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_KEY_SERVICE,
userDelegationKey.getSignedService());
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_KEY_VERSION,
userDelegationKey.getSignedVersion());
/* Only parameters relevant for user delegation SAS. */
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_PREAUTHORIZED_AGENT_OBJECT_ID, this.authorizedAadObjectId);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CORRELATION_ID, this.correlationId);
}
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_RESOURCE, this.resource);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_PERMISSIONS, this.permissions);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNATURE, signature);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_ENCRYPTION_SCOPE, this.encryptionScope);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CACHE_CONTROL, this.cacheControl);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CONTENT_DISPOSITION, this.contentDisposition);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CONTENT_ENCODING, this.contentEncoding);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CONTENT_LANGUAGE, this.contentLanguage);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CONTENT_TYPE, this.contentType);
return sb.toString();
}
/**
* Ensures that the builder's properties are in a consistent state.
* 1. If there is no version, use latest.
* 2. If there is no identifier set, ensure expiryTime and permissions are set.
* 3. Resource name is chosen by:
* a. If "BlobName" is _not_ set, it is a container resource.
* b. Otherwise, if "SnapshotId" is set, it is a blob snapshot resource.
* c. Otherwise, if "VersionId" is set, it is a blob version resource.
* d. Otherwise, it is a blob resource.
* 4. Reparse permissions depending on what the resource is. If it is an unrecognized resource, do nothing.
*
* Taken from:
* https:
* https:
*/
private void ensureState() {
if (identifier == null) {
if (expiryTime == null || permissions == null) {
throw LOGGER.logExceptionAsError(new IllegalStateException("If identifier is not set, expiry time "
+ "and permissions must be set"));
}
}
if (CoreUtils.isNullOrEmpty(blobName)) {
resource = SAS_CONTAINER_CONSTANT;
} else if (snapshotId != null) {
resource = SAS_BLOB_SNAPSHOT_CONSTANT;
} else if (versionId != null) {
resource = SAS_BLOB_VERSION_CONSTANT;
} else {
resource = SAS_BLOB_CONSTANT;
}
if (permissions != null) {
switch (resource) {
case SAS_BLOB_CONSTANT:
case SAS_BLOB_SNAPSHOT_CONSTANT:
case SAS_BLOB_VERSION_CONSTANT:
permissions = BlobSasPermission.parse(permissions).toString();
break;
case SAS_CONTAINER_CONSTANT:
permissions = BlobContainerSasPermission.parse(permissions).toString();
break;
default:
LOGGER.info("Not re-parsing permissions. Resource type '{}' is unknown.", resource);
break;
}
}
}
/**
* Computes the canonical name for a container or blob resource for SAS signing.
*/
private String getCanonicalName(String account) {
return CoreUtils.isNullOrEmpty(blobName)
? String.format("/blob/%s/%s", account, containerName)
: String.format("/blob/%s/%s/%s", account, containerName, blobName.replace("\\", "/"));
}
private String stringToSign(final UserDelegationKey key, String canonicalName) {
String versionSegment = this.snapshotId == null ? this.versionId : this.snapshotId;
if (VERSION.compareTo(BlobServiceVersion.V2019_12_12.getVersion()) <= 0) {
return String.join("\n",
this.permissions == null ? "" : this.permissions,
this.startTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.startTime),
this.expiryTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.expiryTime),
canonicalName,
key.getSignedObjectId() == null ? "" : key.getSignedObjectId(),
key.getSignedTenantId() == null ? "" : key.getSignedTenantId(),
key.getSignedStart() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedStart()),
key.getSignedExpiry() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedExpiry()),
key.getSignedService() == null ? "" : key.getSignedService(),
key.getSignedVersion() == null ? "" : key.getSignedVersion(),
this.sasIpRange == null ? "" : this.sasIpRange.toString(),
this.protocol == null ? "" : this.protocol.toString(),
VERSION,
resource,
versionSegment == null ? "" : versionSegment,
this.cacheControl == null ? "" : this.cacheControl,
this.contentDisposition == null ? "" : this.contentDisposition,
this.contentEncoding == null ? "" : this.contentEncoding,
this.contentLanguage == null ? "" : this.contentLanguage,
this.contentType == null ? "" : this.contentType
);
} else {
return String.join("\n",
this.permissions == null ? "" : this.permissions,
this.startTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.startTime),
this.expiryTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.expiryTime),
canonicalName,
key.getSignedObjectId() == null ? "" : key.getSignedObjectId(),
key.getSignedTenantId() == null ? "" : key.getSignedTenantId(),
key.getSignedStart() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedStart()),
key.getSignedExpiry() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedExpiry()),
key.getSignedService() == null ? "" : key.getSignedService(),
key.getSignedVersion() == null ? "" : key.getSignedVersion(),
this.authorizedAadObjectId == null ? "" : this.authorizedAadObjectId,
"", /* suoid - empty since this applies to HNS only accounts. */
this.correlationId == null ? "" : this.correlationId,
this.sasIpRange == null ? "" : this.sasIpRange.toString(),
this.protocol == null ? "" : this.protocol.toString(),
VERSION,
resource,
versionSegment == null ? "" : versionSegment,
this.encryptionScope == null ? "" : this.encryptionScope,
this.cacheControl == null ? "" : this.cacheControl,
this.contentDisposition == null ? "" : this.contentDisposition,
this.contentEncoding == null ? "" : this.contentEncoding,
this.contentLanguage == null ? "" : this.contentLanguage,
this.contentType == null ? "" : this.contentType
);
}
}
} | class BlobSasImplUtil {
/**
* The SAS blob constant.
*/
private static final String SAS_BLOB_CONSTANT = "b";
/**
* The SAS blob snapshot constant.
*/
private static final String SAS_BLOB_SNAPSHOT_CONSTANT = "bs";
/**
* The SAS blob version constant.
*/
private static final String SAS_BLOB_VERSION_CONSTANT = "bv";
/**
* The SAS blob container constant.
*/
private static final String SAS_CONTAINER_CONSTANT = "c";
private static final ClientLogger LOGGER = new ClientLogger(BlobSasImplUtil.class);
private static final String VERSION = Configuration.getGlobalConfiguration()
.get(Constants.PROPERTY_AZURE_STORAGE_SAS_SERVICE_VERSION, BlobServiceVersion.getLatest().getVersion());
private SasProtocol protocol;
private OffsetDateTime startTime;
private OffsetDateTime expiryTime;
private String permissions;
private SasIpRange sasIpRange;
private String containerName;
private String blobName;
private String resource;
private String snapshotId;
private String versionId;
private String identifier;
private String cacheControl;
private String contentDisposition;
private String contentEncoding;
private String contentLanguage;
private String contentType;
private String authorizedAadObjectId;
private String correlationId;
private String encryptionScope;
/**
* Creates a new {@link BlobSasImplUtil} with the specified parameters
*
* @param sasValues {@link BlobServiceSasSignatureValues}
* @param containerName The container name
*/
public BlobSasImplUtil(BlobServiceSasSignatureValues sasValues, String containerName) {
this(sasValues, containerName, null, null, null, null);
}
/**
* Creates a new {@link BlobSasImplUtil} with the specified parameters
*
* @param sasValues {@link BlobServiceSasSignatureValues}
* @param containerName The container name
* @param blobName The blob name
* @param snapshotId The snapshot id
* @param versionId The version id
* @param encryptionScope The encryption scope
*/
public BlobSasImplUtil(BlobServiceSasSignatureValues sasValues, String containerName, String blobName,
String snapshotId, String versionId, String encryptionScope) {
Objects.requireNonNull(sasValues);
if (snapshotId != null && versionId != null) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("'snapshot' and 'versionId' cannot be used at the same time."));
}
this.protocol = sasValues.getProtocol();
this.startTime = sasValues.getStartTime();
this.expiryTime = sasValues.getExpiryTime();
this.permissions = sasValues.getPermissions();
this.sasIpRange = sasValues.getSasIpRange();
this.containerName = containerName;
this.blobName = blobName;
this.snapshotId = snapshotId;
this.versionId = versionId;
this.identifier = sasValues.getIdentifier();
this.cacheControl = sasValues.getCacheControl();
this.contentDisposition = sasValues.getContentDisposition();
this.contentEncoding = sasValues.getContentEncoding();
this.contentLanguage = sasValues.getContentLanguage();
this.contentType = sasValues.getContentType();
this.authorizedAadObjectId = sasValues.getPreauthorizedAgentObjectId();
this.correlationId = sasValues.getCorrelationId();
this.encryptionScope = encryptionScope;
}
/**
* Generates a Sas signed with a {@link StorageSharedKeyCredential}
*
* @param storageSharedKeyCredentials {@link StorageSharedKeyCredential}
* @param context Additional context that is passed through the code when generating a SAS.
* @return A String representing the Sas
*/
public String generateSas(StorageSharedKeyCredential storageSharedKeyCredentials, Context context) {
StorageImplUtils.assertNotNull("storageSharedKeyCredentials", storageSharedKeyCredentials);
ensureState();
final String canonicalName = getCanonicalName(storageSharedKeyCredentials.getAccountName());
final String stringToSign = stringToSign(canonicalName);
StorageImplUtils.logStringToSign(LOGGER, stringToSign, context);
final String signature = storageSharedKeyCredentials.computeHmac256(stringToSign);
return encode(null /* userDelegationKey */, signature);
}
/**
* Generates a Sas signed with a {@link UserDelegationKey}
*
* @param delegationKey {@link UserDelegationKey}
* @param accountName The account name
* @param context Additional context that is passed through the code when generating a SAS.
* @return A String representing the Sas
*/
public String generateUserDelegationSas(UserDelegationKey delegationKey, String accountName, Context context) {
StorageImplUtils.assertNotNull("delegationKey", delegationKey);
StorageImplUtils.assertNotNull("accountName", accountName);
ensureState();
final String canonicalName = getCanonicalName(accountName);
final String stringToSign = stringToSign(delegationKey, canonicalName);
StorageImplUtils.logStringToSign(LOGGER, stringToSign, context);
String signature = StorageImplUtils.computeHMac256(delegationKey.getValue(), stringToSign);
return encode(delegationKey, signature);
}
/**
* Encodes a Sas from the values in this type.
* @param userDelegationKey {@link UserDelegationKey}
* @param signature The signature of the Sas.
* @return A String representing the Sas.
*/
private String encode(UserDelegationKey userDelegationKey, String signature) {
/*
We should be url-encoding each key and each value, but because we know all the keys and values will encode to
themselves, we cheat except for the signature value.
*/
StringBuilder sb = new StringBuilder();
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SERVICE_VERSION, VERSION);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_PROTOCOL, this.protocol);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_START_TIME, formatQueryParameterDate(this.startTime));
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_EXPIRY_TIME, formatQueryParameterDate(this.expiryTime));
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_IP_RANGE, this.sasIpRange);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_IDENTIFIER, this.identifier);
if (userDelegationKey != null) {
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_OBJECT_ID,
userDelegationKey.getSignedObjectId());
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_TENANT_ID,
userDelegationKey.getSignedTenantId());
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_KEY_START,
formatQueryParameterDate(userDelegationKey.getSignedStart()));
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_KEY_EXPIRY,
formatQueryParameterDate(userDelegationKey.getSignedExpiry()));
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_KEY_SERVICE,
userDelegationKey.getSignedService());
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_KEY_VERSION,
userDelegationKey.getSignedVersion());
/* Only parameters relevant for user delegation SAS. */
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_PREAUTHORIZED_AGENT_OBJECT_ID, this.authorizedAadObjectId);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CORRELATION_ID, this.correlationId);
}
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_RESOURCE, this.resource);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNED_PERMISSIONS, this.permissions);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_SIGNATURE, signature);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_ENCRYPTION_SCOPE, this.encryptionScope);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CACHE_CONTROL, this.cacheControl);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CONTENT_DISPOSITION, this.contentDisposition);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CONTENT_ENCODING, this.contentEncoding);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CONTENT_LANGUAGE, this.contentLanguage);
tryAppendQueryParameter(sb, Constants.UrlConstants.SAS_CONTENT_TYPE, this.contentType);
return sb.toString();
}
/**
* Ensures that the builder's properties are in a consistent state.
* 1. If there is no version, use latest.
* 2. If there is no identifier set, ensure expiryTime and permissions are set.
* 3. Resource name is chosen by:
* a. If "BlobName" is _not_ set, it is a container resource.
* b. Otherwise, if "SnapshotId" is set, it is a blob snapshot resource.
* c. Otherwise, if "VersionId" is set, it is a blob version resource.
* d. Otherwise, it is a blob resource.
* 4. Reparse permissions depending on what the resource is. If it is an unrecognized resource, do nothing.
*
* Taken from:
* https:
* https:
*/
private void ensureState() {
if (identifier == null) {
if (expiryTime == null || permissions == null) {
throw LOGGER.logExceptionAsError(new IllegalStateException("If identifier is not set, expiry time "
+ "and permissions must be set"));
}
}
if (CoreUtils.isNullOrEmpty(blobName)) {
resource = SAS_CONTAINER_CONSTANT;
} else if (snapshotId != null) {
resource = SAS_BLOB_SNAPSHOT_CONSTANT;
} else if (versionId != null) {
resource = SAS_BLOB_VERSION_CONSTANT;
} else {
resource = SAS_BLOB_CONSTANT;
}
if (permissions != null) {
switch (resource) {
case SAS_BLOB_CONSTANT:
case SAS_BLOB_SNAPSHOT_CONSTANT:
case SAS_BLOB_VERSION_CONSTANT:
permissions = BlobSasPermission.parse(permissions).toString();
break;
case SAS_CONTAINER_CONSTANT:
permissions = BlobContainerSasPermission.parse(permissions).toString();
break;
default:
LOGGER.info("Not re-parsing permissions. Resource type '{}' is unknown.", resource);
break;
}
}
}
/**
* Computes the canonical name for a container or blob resource for SAS signing.
*/
private String getCanonicalName(String account) {
return CoreUtils.isNullOrEmpty(blobName)
? String.format("/blob/%s/%s", account, containerName)
: String.format("/blob/%s/%s/%s", account, containerName, blobName.replace("\\", "/"));
}
private String stringToSign(final UserDelegationKey key, String canonicalName) {
String versionSegment = this.snapshotId == null ? this.versionId : this.snapshotId;
if (VERSION.compareTo(BlobServiceVersion.V2019_12_12.getVersion()) <= 0) {
return String.join("\n",
this.permissions == null ? "" : this.permissions,
this.startTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.startTime),
this.expiryTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.expiryTime),
canonicalName,
key.getSignedObjectId() == null ? "" : key.getSignedObjectId(),
key.getSignedTenantId() == null ? "" : key.getSignedTenantId(),
key.getSignedStart() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedStart()),
key.getSignedExpiry() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedExpiry()),
key.getSignedService() == null ? "" : key.getSignedService(),
key.getSignedVersion() == null ? "" : key.getSignedVersion(),
this.sasIpRange == null ? "" : this.sasIpRange.toString(),
this.protocol == null ? "" : this.protocol.toString(),
VERSION,
resource,
versionSegment == null ? "" : versionSegment,
this.cacheControl == null ? "" : this.cacheControl,
this.contentDisposition == null ? "" : this.contentDisposition,
this.contentEncoding == null ? "" : this.contentEncoding,
this.contentLanguage == null ? "" : this.contentLanguage,
this.contentType == null ? "" : this.contentType
);
} else if (VERSION.compareTo(BlobServiceVersion.V2020_10_02.getVersion()) <= 0) {
return String.join("\n",
this.permissions == null ? "" : this.permissions,
this.startTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.startTime),
this.expiryTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.expiryTime),
canonicalName,
key.getSignedObjectId() == null ? "" : key.getSignedObjectId(),
key.getSignedTenantId() == null ? "" : key.getSignedTenantId(),
key.getSignedStart() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedStart()),
key.getSignedExpiry() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedExpiry()),
key.getSignedService() == null ? "" : key.getSignedService(),
key.getSignedVersion() == null ? "" : key.getSignedVersion(),
this.authorizedAadObjectId == null ? "" : this.authorizedAadObjectId,
"", /* suoid - empty since this applies to HNS only accounts. */
this.correlationId == null ? "" : this.correlationId,
this.sasIpRange == null ? "" : this.sasIpRange.toString(),
this.protocol == null ? "" : this.protocol.toString(),
VERSION,
resource,
versionSegment == null ? "" : versionSegment,
this.cacheControl == null ? "" : this.cacheControl,
this.contentDisposition == null ? "" : this.contentDisposition,
this.contentEncoding == null ? "" : this.contentEncoding,
this.contentLanguage == null ? "" : this.contentLanguage,
this.contentType == null ? "" : this.contentType
);
} else {
return String.join("\n",
this.permissions == null ? "" : this.permissions,
this.startTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.startTime),
this.expiryTime == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(this.expiryTime),
canonicalName,
key.getSignedObjectId() == null ? "" : key.getSignedObjectId(),
key.getSignedTenantId() == null ? "" : key.getSignedTenantId(),
key.getSignedStart() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedStart()),
key.getSignedExpiry() == null ? "" : Constants.ISO_8601_UTC_DATE_FORMATTER.format(key.getSignedExpiry()),
key.getSignedService() == null ? "" : key.getSignedService(),
key.getSignedVersion() == null ? "" : key.getSignedVersion(),
this.authorizedAadObjectId == null ? "" : this.authorizedAadObjectId,
"", /* suoid - empty since this applies to HNS only accounts. */
this.correlationId == null ? "" : this.correlationId,
this.sasIpRange == null ? "" : this.sasIpRange.toString(),
this.protocol == null ? "" : this.protocol.toString(),
VERSION,
resource,
versionSegment == null ? "" : versionSegment,
this.encryptionScope == null ? "" : this.encryptionScope,
this.cacheControl == null ? "" : this.cacheControl,
this.contentDisposition == null ? "" : this.contentDisposition,
this.contentEncoding == null ? "" : this.contentEncoding,
this.contentLanguage == null ? "" : this.contentLanguage,
this.contentType == null ? "" : this.contentType
);
}
}
} |
We should have tests that cover each one of the classes being instrumented... | public void testNettyEventExecutorMetrics() {
testNettyMetrics(2L, NettyEventExecutorMetrics.class);
} | testNettyMetrics(2L, NettyEventExecutorMetrics.class); | public void testNettyEventExecutorMetrics() {
testNettyMetrics(2L, NettyEventExecutorMetrics.class);
} | class NettyMetricsTest {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("quarkus.micrometer.binder-enabled-default", "false")
.overrideConfigKey("quarkus.micrometer.binder.netty.enabled", "true")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false");
@Inject
@Any
Instance<MeterBinder> binders;
private void testNettyMetrics(long expected, Class<? extends MeterBinder> mbClass) {
Assertions.assertFalse(binders.isUnsatisfied());
long count = binders.stream()
.filter(mbClass::isInstance)
.count();
Assertions.assertEquals(expected, count);
}
@Test
public void testNettyAllocatorMetrics() {
testNettyMetrics(5L, NettyAllocatorMetrics.class);
}
@Test
} | class NettyMetricsTest {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(jar -> jar.addClasses(HelloResource.class))
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("quarkus.micrometer.binder-enabled-default", "false")
.overrideConfigKey("quarkus.micrometer.binder.netty.enabled", "true")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false");
@Inject
@Any
Instance<MeterBinder> binders;
@Inject
MeterRegistry registry;
@Inject
Vertx vertx;
private static final Set<Tag> NAM_PBBA_TAGS = Tags.of(
"id", String.valueOf(PooledByteBufAllocator.DEFAULT.hashCode()),
"allocator.type", "PooledByteBufAllocator")
.stream()
.collect(Collectors.toSet());
private static final Set<Tag> NAM_UNPBBA_TAGS = Tags.of(
"id", String.valueOf(UnpooledByteBufAllocator.DEFAULT.hashCode()),
"allocator.type", "UnpooledByteBufAllocator")
.stream()
.collect(Collectors.toSet());
private static final Set<Tag> VX_NAM_PBBA_TAGS = Tags.of(
"id", String.valueOf(VertxByteBufAllocator.POOLED_ALLOCATOR.hashCode()),
"allocator.type", "PooledByteBufAllocator")
.stream()
.collect(Collectors.toSet());
private static final Set<Tag> VX_NAM_UNPBBA_TAGS = Tags.of(
"id", String.valueOf(VertxByteBufAllocator.UNPOOLED_ALLOCATOR.hashCode()),
"allocator.type", "UnpooledByteBufAllocator")
.stream()
.collect(Collectors.toSet());
private static final Tag HEAP_MEMORY = Tag.of(AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "heap");
private static final Tag DIRECT_MEMORY = Tag.of(AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "direct");
enum AllocatorKeyNames implements KeyName {
ID {
public String asString() {
return "id";
}
},
ALLOCATOR_TYPE {
public String asString() {
return "allocator.type";
}
};
}
enum AllocatorMemoryKeyNames implements KeyName {
MEMORY_TYPE {
public String asString() {
return "memory.type";
}
};
}
private void testNettyMetrics(long expected, Class<? extends MeterBinder> mbClass) {
Assertions.assertFalse(binders.isUnsatisfied());
long count = binders.stream()
.filter(mbClass::isInstance)
.count();
Assertions.assertEquals(expected, count);
}
private static Double getValue(List<Meter> meters, Set<Tag> expected) {
for (Meter meter : meters) {
List<Tag> tags = meter.getId().getTags();
if (tags.containsAll(expected)) {
return meter.match(Gauge::value, null, null, null, null, null, null, null, null);
}
}
return null;
}
private static Set<Tag> tags(Set<Tag> tags, Tag tag) {
Set<Tag> newTags = new HashSet<>(tags);
newTags.add(tag);
return newTags;
}
private void testAllocatorMetricsValues(Set<Tag> tags) {
List<Meter> meters = registry.getMeters();
Double heap0 = getValue(meters, tags(tags, HEAP_MEMORY));
Assertions.assertNotNull(heap0);
Double direct0 = getValue(meters, tags(tags, DIRECT_MEMORY));
Assertions.assertNotNull(direct0);
RestAssured.get("/hello/Netty").then().body(Matchers.equalTo("hello Netty"));
Double heap1 = getValue(meters, tags(tags, HEAP_MEMORY));
Double direct1 = getValue(meters, tags(tags, DIRECT_MEMORY));
Assertions.assertTrue(heap0 <= heap1);
Assertions.assertTrue(direct0 <= direct1);
}
@Test
public void testNettyAllocatorMetrics() {
testNettyMetrics(5L, NettyAllocatorMetrics.class);
}
@Test
@Test
public void testAllocatorMetricsValues() {
testAllocatorMetricsValues(NAM_PBBA_TAGS);
testAllocatorMetricsValues(NAM_UNPBBA_TAGS);
testAllocatorMetricsValues(VX_NAM_PBBA_TAGS);
testAllocatorMetricsValues(VX_NAM_UNPBBA_TAGS);
}
@Test
@Timeout(60L)
public void testEventExecutorMetricsValues() throws Exception {
VertxInternal vi = (VertxInternal) vertx;
assertEventGroup(vi.getEventLoopGroup());
assertEventGroup(vi.getAcceptorEventLoopGroup());
}
private void assertEventGroup(EventLoopGroup group) throws Exception {
int tasks = 0;
for (EventExecutor ee : group) {
tasks++;
}
final CyclicBarrier allPendingTasksAreIn = new CyclicBarrier(tasks + 1);
CountDownLatch waitCollectingMeasures = new CountDownLatch(1);
List<Future<Future<?>>> pendingTasksCompleted = new ArrayList<>(tasks);
for (EventExecutor eventLoop : group) {
pendingTasksCompleted.add(eventLoop.submit(() -> {
try {
Future<?> pendingTask = eventLoop.submit(() -> {
});
allPendingTasksAreIn.await();
waitCollectingMeasures.await();
return pendingTask;
} catch (Throwable ignore) {
return null;
}
}));
}
allPendingTasksAreIn.await();
List<Meter> meters = registry.getMeters();
for (EventExecutor eventLoop : group) {
checkMetrics(meters, eventLoop, 1);
}
waitCollectingMeasures.countDown();
for (Future<Future<?>> pendingTaskCompleted : pendingTasksCompleted) {
pendingTaskCompleted.get().get();
}
for (EventExecutor eventLoop : group) {
checkMetrics(meters, eventLoop, 0);
}
}
private void checkMetrics(List<Meter> meters, EventExecutor executor, int expected) {
if (executor instanceof SingleThreadEventExecutor) {
SingleThreadEventExecutor stee = (SingleThreadEventExecutor) executor;
int pendingTasks = stee.pendingTasks();
Assertions.assertEquals(expected, pendingTasks);
Tag tag = Tag.of("name", stee.threadProperties().name());
Set<Tag> tags = Set.of(tag);
Double metricsValue = getValue(meters, tags);
Assertions.assertNotNull(metricsValue);
int mvInt = metricsValue.intValue();
Assertions.assertEquals(expected, mvInt);
}
}
} |
```suggestion "Detected platform dependencies with provided scope. Redistribution is not " + ``` | private void addProvidedDependencyWarning(List<Diagnostic> emitResultDiagnostics) {
if (!jarResolver.providedPlatformLibs().isEmpty()) {
DiagnosticInfo diagnosticInfo = new DiagnosticInfo(
ProjectDiagnosticErrorCode.PROVIDED_PLATFORM_JAR_IN_EXECUTABLE.diagnosticId(),
"Detected platform dependencies with provided scope; redistribution is not " +
"recommended due to potential license restrictions\n",
DiagnosticSeverity.WARNING);
emitResultDiagnostics.add(new PackageDiagnostic(diagnosticInfo,
this.packageContext().descriptor().name().toString()));
}
} | "Detected platform dependencies with provided scope; redistribution is not " + | private void addProvidedDependencyWarning(List<Diagnostic> emitResultDiagnostics) {
if (!jarResolver.providedPlatformLibs().isEmpty()) {
DiagnosticInfo diagnosticInfo = new DiagnosticInfo(
ProjectDiagnosticErrorCode.PROVIDED_PLATFORM_JAR_IN_EXECUTABLE.diagnosticId(),
String.format("Detected platform dependencies with '%s' scope. Redistribution is discouraged" +
" due to potential license restrictions%n", PlatformLibraryScope.PROVIDED.getStringValue()),
DiagnosticSeverity.WARNING);
emitResultDiagnostics.add(new PackageDiagnostic(diagnosticInfo,
this.packageContext().descriptor().name().toString()));
}
} | class JarConflict {
JarLibrary firstJarLibrary;
JarLibrary secondJarLibrary;
List<String> classes;
JarConflict(JarLibrary firstJarLibrary, JarLibrary secondJarLibrary, List<String> classes) {
this.firstJarLibrary = firstJarLibrary;
this.secondJarLibrary = secondJarLibrary;
this.classes = classes;
}
JarLibrary firstJarLibrary() {
return firstJarLibrary;
}
void addClasses(String entry) {
classes.add(entry);
}
public String getWarning(boolean listClasses) {
String conflictedJarPkg1 = "";
String conflictedJarPkg2 = "";
if (firstJarLibrary.packageName().isPresent()) {
conflictedJarPkg1 = " dependency of '" + firstJarLibrary.packageName().get() + "'";
}
if (secondJarLibrary.packageName().isPresent()) {
conflictedJarPkg2 = " dependency of '" + secondJarLibrary.packageName().get() + "'";
}
StringBuilder warning = new StringBuilder(
"\t\t'" + firstJarLibrary.path().getFileName() + "'" + conflictedJarPkg1 + " conflict with '"
+ secondJarLibrary.path().getFileName() + "'" + conflictedJarPkg2);
if (listClasses) {
for (String conflictedClass : classes) {
warning.append("\n\t\t\t").append(conflictedClass);
}
}
return String.valueOf(warning);
}
} | class JarConflict {
JarLibrary firstJarLibrary;
JarLibrary secondJarLibrary;
List<String> classes;
JarConflict(JarLibrary firstJarLibrary, JarLibrary secondJarLibrary, List<String> classes) {
this.firstJarLibrary = firstJarLibrary;
this.secondJarLibrary = secondJarLibrary;
this.classes = classes;
}
JarLibrary firstJarLibrary() {
return firstJarLibrary;
}
void addClasses(String entry) {
classes.add(entry);
}
public String getWarning(boolean listClasses) {
String conflictedJarPkg1 = "";
String conflictedJarPkg2 = "";
if (firstJarLibrary.packageName().isPresent()) {
conflictedJarPkg1 = " dependency of '" + firstJarLibrary.packageName().get() + "'";
}
if (secondJarLibrary.packageName().isPresent()) {
conflictedJarPkg2 = " dependency of '" + secondJarLibrary.packageName().get() + "'";
}
StringBuilder warning = new StringBuilder(
"\t\t'" + firstJarLibrary.path().getFileName() + "'" + conflictedJarPkg1 + " conflict with '"
+ secondJarLibrary.path().getFileName() + "'" + conflictedJarPkg2);
if (listClasses) {
for (String conflictedClass : classes) {
warning.append("\n\t\t\t").append(conflictedClass);
}
}
return String.valueOf(warning);
}
} |
I suppose you could get back to using `arc` now? I had to change it because of a conflict with `hibernate-search-elasticsearch`. | void addSomeValidExtensions() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("jdbc-postgre", "agroal", "quarkus-arc", " hibernate-validator",
"commons-io:commons-io:2.6")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
hasDependency(model, "quarkus-arc");
hasDependency(model, "quarkus-hibernate-validator");
hasDependency(model, "commons-io", "commons-io", "2.6");
doesNotHaveDependency(model, "quarkus-jdbc-postgresql");
} | Model model = MojoUtils.readPom(pom); | void addSomeValidExtensions() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("jdbc-postgre", "agroal", "quarkus-arc", " hibernate-validator",
"commons-io:commons-io:2.6")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
hasDependency(model, "quarkus-arc");
hasDependency(model, "quarkus-hibernate-validator");
hasDependency(model, "commons-io", "commons-io", "2.6");
hasDependency(model, "quarkus-jdbc-postgresql");
} | class AddExtensionsTest {
@Test
@Test
void addMissingExtension() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
AddExtensionResult result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("missing")));
Model model = MojoUtils.readPom(pom);
doesNotHaveDependency(model, "quarkus-missing");
Assertions.assertFalse(result.succeeded());
Assertions.assertFalse(result.isUpdated());
}
@Test
void addExtensionTwiceInOneBatch() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
AddExtensionResult result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("agroal", "agroal")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
Assertions.assertEquals(1,
model.getDependencies().stream().filter(d -> d.getArtifactId().equals("quarkus-agroal")).count());
Assertions.assertTrue(result.isUpdated());
Assertions.assertTrue(result.succeeded());
}
@Test
void addExtensionTwiceInTwoBatches() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
AddExtensionResult result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(Collections.singletonList("agroal")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
Assertions.assertEquals(1,
model.getDependencies().stream().filter(d -> d.getArtifactId().equals("quarkus-agroal")).count());
Assertions.assertTrue(result.isUpdated());
Assertions.assertTrue(result.succeeded());
AddExtensionResult result2 = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(Collections.singletonList("agroal")));
model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
Assertions.assertEquals(1,
model.getDependencies().stream().filter(d -> d.getArtifactId().equals("quarkus-agroal")).count());
Assertions.assertFalse(result2.isUpdated());
Assertions.assertTrue(result2.succeeded());
}
@Test
void addExistingAndMissingExtensions() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
AddExtensionResult result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("missing", "agroal")));
Model model = MojoUtils.readPom(pom);
doesNotHaveDependency(model, "quarkus-missing");
hasDependency(model, "quarkus-agroal");
Assertions.assertFalse(result.succeeded());
Assertions.assertTrue(result.isUpdated());
}
@Test
void testMultiMatchByLabels() {
Extension e1 = new Extension("org.acme", "e1", "1.0")
.setName("some complex seo unaware name")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "e2", "1.0")
.setName("some foo bar")
.setLabels(new String[] { "foo", "bar", "baz" });
Extension e3 = new Extension("org.acme", "e3", "1.0")
.setName("unrelated")
.setLabels(new String[] { "bar" });
List<Extension> extensions = asList(e1, e2, e3);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions);
Assertions.assertFalse(matches.matches());
Assertions.assertEquals(2, matches.getExtensions().size());
}
@Test
void testThatSingleLabelMatchIsNotAMatch() {
Extension e1 = new Extension("org.acme", "e1", "1.0")
.setName("e1")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "e2", "1.0")
.setName("e2")
.setLabels(new String[] { "bar", "baz" });
List<Extension> extensions = asList(e1, e2);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions);
Assertions.assertFalse(matches.matches());
Assertions.assertEquals(1, matches.getExtensions().size());
}
@Test
void testMultiMatchByArtifactIdsAndNames() {
Extension e1 = new Extension("org.acme", "e1", "1.0")
.setName("foo")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "quarkus-foo", "1.0")
.setName("some foo bar")
.setLabels(new String[] { "foo", "bar", "baz" });
Extension e3 = new Extension("org.acme", "e3", "1.0")
.setName("unrelated")
.setLabels(new String[] { "foo" });
List<Extension> extensions = asList(e1, e2, e3);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions);
Assertions.assertFalse(matches.matches());
Assertions.assertEquals(3, matches.getExtensions().size());
}
@Test
void testShortNameSelection() {
Extension e1 = new Extension("org.acme", "some-complex-seo-unaware-artifactid", "1.0")
.setName("some complex seo unaware name")
.setShortName("foo")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "some-foo-bar", "1.0")
.setName("some foo bar")
.setLabels(new String[] { "foo", "bar", "baz" });
Extension e3 = new Extension("org.acme", "unrelated", "1.0")
.setName("unrelated")
.setLabels(new String[] { "foo" });
List<Extension> extensions = asList(e1, e2, e3);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions);
Assertions.assertTrue(matches.matches());
Assertions.assertEquals(1, matches.getExtensions().size());
Assertions.assertNotNull(matches.getMatch());
Assertions.assertTrue(matches.getMatch().getArtifactId().equalsIgnoreCase("some-complex-seo-unaware-artifactid"));
}
@Test
void testArtifactIdSelectionWithQuarkusPrefix() {
Extension e1 = new Extension("org.acme", "quarkus-foo", "1.0")
.setName("some complex seo unaware name")
.setShortName("foo")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "quarkus-foo-bar", "1.0")
.setName("some foo bar")
.setLabels(new String[] { "foo", "bar", "baz" });
Extension e3 = new Extension("org.acme", "quarkus-unrelated", "1.0")
.setName("unrelated")
.setLabels(new String[] { "foo" });
List<Extension> extensions = asList(e1, e2, e3);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions);
Assertions.assertEquals(1, matches.getExtensions().size());
Assertions.assertNotNull(matches.getMatch());
Assertions.assertTrue(matches.getMatch().getArtifactId().equalsIgnoreCase("quarkus-foo"));
}
@Test
void testArtifactIdSelectionWithQuarkusSmallRyePrefix() {
Extension e1 = new Extension("org.acme", "quarkus-smallrye-foo", "1.0")
.setName("some complex seo unaware name")
.setShortName("foo")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "quarkus-foo-bar", "1.0")
.setName("some foo bar")
.setLabels(new String[] { "foo", "bar", "baz" });
Extension e3 = new Extension("org.acme", "quarkus-unrelated", "1.0")
.setName("unrelated")
.setLabels(new String[] { "foo" });
List<Extension> extensions = asList(e1, e2, e3);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions);
Assertions.assertEquals(1, matches.getExtensions().size());
Assertions.assertNotNull(matches.getMatch());
Assertions.assertTrue(matches.getMatch().getArtifactId().equalsIgnoreCase("quarkus-smallrye-foo"));
}
@Test
void addDuplicatedExtension() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("agroal", "jdbc", "non-exist-ent")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
doesNotHaveDependency(model, "quarkus-jdbc-postgresql");
doesNotHaveDependency(model, "quarkus-jdbc-h2");
}
private void hasDependency(final Model model, final String artifactId) {
Assertions.assertTrue(model.getDependencies()
.stream()
.anyMatch(d -> d.getGroupId().equals(MojoUtils.getPluginGroupId()) &&
d.getArtifactId().equals(artifactId)));
}
private void hasDependency(final Model model, String groupId, String artifactId, String version) {
Assertions.assertTrue(model.getDependencies()
.stream()
.anyMatch(d -> d.getGroupId().equals(groupId) &&
d.getArtifactId().equals(artifactId) &&
d.getVersion().equals(version)));
}
private void doesNotHaveDependency(final Model model, final String artifactId) {
Assertions.assertFalse(model.getDependencies()
.stream()
.anyMatch(d -> d.getGroupId().equals(MojoUtils.getPluginGroupId()) &&
d.getArtifactId().equals(artifactId)));
}
} | class AddExtensionsTest {
@Test
@Test
void testPartialMatches() throws IOException {
File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("orm-pana", "jdbc-postgre", "arc")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-arc");
hasDependency(model, "quarkus-hibernate-orm-panache");
hasDependency(model, "quarkus-jdbc-postgresql");
}
@Test
void addMissingExtension() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
AddExtensionResult result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(Collections.singletonList("missing")));
Model model = MojoUtils.readPom(pom);
doesNotHaveDependency(model, "quarkus-missing");
Assertions.assertFalse(result.succeeded());
Assertions.assertFalse(result.isUpdated());
}
@Test
void addExtensionTwiceInOneBatch() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
AddExtensionResult result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("agroal", "agroal")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
Assertions.assertEquals(1,
model.getDependencies().stream().filter(d -> d.getArtifactId().equals("quarkus-agroal")).count());
Assertions.assertTrue(result.isUpdated());
Assertions.assertTrue(result.succeeded());
}
@Test
void addExtensionTwiceInTwoBatches() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
AddExtensionResult result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(Collections.singletonList("agroal")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
Assertions.assertEquals(1,
model.getDependencies().stream().filter(d -> d.getArtifactId().equals("quarkus-agroal")).count());
Assertions.assertTrue(result.isUpdated());
Assertions.assertTrue(result.succeeded());
AddExtensionResult result2 = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(Collections.singletonList("agroal")));
model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
Assertions.assertEquals(1,
model.getDependencies().stream().filter(d -> d.getArtifactId().equals("quarkus-agroal")).count());
Assertions.assertFalse(result2.isUpdated());
Assertions.assertTrue(result2.succeeded());
}
/**
* This test reproduce the issue we had using the first selection algorithm.
* The `arc` query was matching ArC but also hibernate-search-elasticsearch.
*/
@Test
void testPartialMatchConflict() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
AddExtensionResult result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(Collections.singletonList("arc")));
Assertions.assertTrue(result.isUpdated());
Assertions.assertTrue(result.succeeded());
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-arc");
result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(Collections.singletonList("elasticsearch")));
Assertions.assertTrue(result.isUpdated());
Assertions.assertTrue(result.succeeded());
model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-hibernate-search-elasticsearch");
}
@Test
void addExistingAndMissingExtensions() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
AddExtensionResult result = new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("missing", "agroal")));
Model model = MojoUtils.readPom(pom);
doesNotHaveDependency(model, "quarkus-missing");
hasDependency(model, "quarkus-agroal");
Assertions.assertFalse(result.succeeded());
Assertions.assertTrue(result.isUpdated());
}
@Test
void testMultiMatchByLabels() {
Extension e1 = new Extension("org.acme", "e1", "1.0")
.setName("some extension 1")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "e2", "1.0")
.setName("some extension 2")
.setLabels(new String[] { "foo", "bar", "baz" });
Extension e3 = new Extension("org.acme", "e3", "1.0")
.setName("unrelated")
.setLabels(new String[] { "bar" });
List<Extension> extensions = asList(e1, e2, e3);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions, true);
Assertions.assertFalse(matches.matches());
Assertions.assertEquals(2, matches.getExtensions().size());
matches = AddExtensions.select("foo", extensions, false);
Assertions.assertFalse(matches.matches());
Assertions.assertEquals(0, matches.getExtensions().size());
}
@Test
void testThatSingleLabelMatchIsNotAMatch() {
Extension e1 = new Extension("org.acme", "e1", "1.0")
.setName("e1")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "e2", "1.0")
.setName("e2")
.setLabels(new String[] { "bar", "baz" });
List<Extension> extensions = asList(e1, e2);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions, true);
Assertions.assertFalse(matches.matches());
Assertions.assertEquals(1, matches.getExtensions().size());
}
@Test
void testMultiMatchByArtifactIdsAndNames() {
Extension e1 = new Extension("org.acme", "e1", "1.0")
.setName("foo")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "quarkus-foo", "1.0")
.setName("some foo bar")
.setLabels(new String[] { "foo", "bar", "baz" });
Extension e3 = new Extension("org.acme", "e3", "1.0")
.setName("unrelated")
.setLabels(new String[] { "foo" });
List<Extension> extensions = asList(e1, e2, e3);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions, false);
Assertions.assertFalse(matches.matches());
Assertions.assertEquals(2, matches.getExtensions().size());
matches = AddExtensions.select("foo", extensions, true);
Assertions.assertFalse(matches.matches());
Assertions.assertEquals(3, matches.getExtensions().size());
}
@Test
void testShortNameSelection() {
Extension e1 = new Extension("org.acme", "some-complex-seo-unaware-artifactid", "1.0")
.setName("some complex seo unaware name")
.setShortName("foo")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "some-foo-bar", "1.0")
.setName("some foo bar")
.setLabels(new String[] { "foo", "bar", "baz" });
Extension e3 = new Extension("org.acme", "unrelated", "1.0")
.setName("unrelated")
.setLabels(new String[] { "foo" });
List<Extension> extensions = asList(e1, e2, e3);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions, false);
Assertions.assertTrue(matches.matches());
Assertions.assertEquals(1, matches.getExtensions().size());
Assertions.assertNotNull(matches.getMatch());
Assertions.assertTrue(matches.getMatch().getArtifactId().equalsIgnoreCase("some-complex-seo-unaware-artifactid"));
}
@Test
void testArtifactIdSelectionWithQuarkusPrefix() {
Extension e1 = new Extension("org.acme", "quarkus-foo", "1.0")
.setName("some complex seo unaware name")
.setShortName("foo")
.setLabels(new String[] { "foo", "bar" });
Extension e2 = new Extension("org.acme", "quarkus-foo-bar", "1.0")
.setName("some foo bar")
.setLabels(new String[] { "foo", "bar", "baz" });
Extension e3 = new Extension("org.acme", "quarkus-unrelated", "1.0")
.setName("unrelated")
.setLabels(new String[] { "foo" });
List<Extension> extensions = asList(e1, e2, e3);
Collections.shuffle(extensions);
SelectionResult matches = AddExtensions.select("foo", extensions, false);
Assertions.assertEquals(1, matches.getExtensions().size());
Assertions.assertNotNull(matches.getMatch());
Assertions.assertTrue(matches.getMatch().getArtifactId().equalsIgnoreCase("quarkus-foo"));
}
@Test
void addDuplicatedExtension() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("agroal", "jdbc", "non-exist-ent")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-agroal");
doesNotHaveDependency(model, "quarkus-jdbc-postgresql");
doesNotHaveDependency(model, "quarkus-jdbc-h2");
}
@Test
void addDuplicatedExtensionUsingGAV() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(asList("org.acme:acme:1", "org.acme:acme:1")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "org.acme", "acme", "1");
Assertions.assertEquals(1,
model.getDependencies().stream().filter(d -> d.getArtifactId().equalsIgnoreCase("acme")).count());
}
@Test
void testVertxWithAndWithoutDot() throws IOException {
final File pom = new File("target/extensions-test", "pom.xml");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
File pomFile = new File(pom.getAbsolutePath());
new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(Collections.singletonList("vertx")));
Model model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-vertx");
CreateProjectTest.delete(pom.getParentFile());
new CreateProject(new FileProjectWriter(pom.getParentFile()))
.groupId("org.acme")
.artifactId("add-extension-test")
.version("0.0.1-SNAPSHOT")
.doCreateProject(new HashMap<>());
pomFile = new File(pom.getAbsolutePath());
new AddExtensions(new FileProjectWriter(pomFile.getParentFile()), pomFile.getName())
.addExtensions(new HashSet<>(Collections.singletonList("vert.x")));
model = MojoUtils.readPom(pom);
hasDependency(model, "quarkus-vertx");
}
/** Asserts that the POM declares a dependency on {@code artifactId} under the Quarkus plugin group id. */
private void hasDependency(final Model model, final String artifactId) {
    final boolean present = model.getDependencies()
            .stream()
            .anyMatch(dep -> dep.getGroupId().equals(MojoUtils.getPluginGroupId())
                    && dep.getArtifactId().equals(artifactId));
    Assertions.assertTrue(present);
}
/** Asserts that the POM declares a dependency matching the full group:artifact:version coordinate. */
private void hasDependency(final Model model, String groupId, String artifactId, String version) {
    final boolean present = model.getDependencies()
            .stream()
            .anyMatch(dep -> dep.getGroupId().equals(groupId)
                    && dep.getArtifactId().equals(artifactId)
                    && dep.getVersion().equals(version));
    Assertions.assertTrue(present);
}
/** Asserts that the POM does NOT declare a dependency on {@code artifactId} under the Quarkus plugin group id. */
private void doesNotHaveDependency(final Model model, final String artifactId) {
    final boolean present = model.getDependencies()
            .stream()
            .anyMatch(dep -> dep.getGroupId().equals(MojoUtils.getPluginGroupId())
                    && dep.getArtifactId().equals(artifactId));
    Assertions.assertFalse(present);
}
} |
I assume this makes it so that `<strong>` is rendered correctly by our Javadoc doclet? | private void appendHtml(StringBuilder sb, Node node) {
for (Node childNode : node.childNodes()) {
switch (childNode.nodeName()) {
case PARAGRAPH_NODE:
sb.append(NEW_LINE);
appendHtml(sb, childNode);
break;
case ORDERED_LIST_NODE:
case UN_ORDERED_LIST_NODE:
appendHtml(sb, childNode);
break;
case LIST_ITEM_NODE:
final String marker = childNode.parent().nodeName().equals(ORDERED_LIST_NODE)
? ORDERED_LIST_ITEM_ASCIDOC_STYLE
: UNORDERED_LIST_ITEM_ASCIDOC_STYLE;
sb.append(NEW_LINE);
sb.append(marker);
appendHtml(sb, childNode);
break;
case LINK_NODE:
final String link = childNode.attr(HREF_ATTRIBUTE);
sb.append("link:");
sb.append(link);
final StringBuilder caption = new StringBuilder();
appendHtml(caption, childNode);
sb.append(String.format(LINK_ATTRIBUTE_FORMAT, caption.toString().trim()));
break;
case CODE_NODE:
sb.append(BACKTICK);
appendHtml(sb, childNode);
sb.append(BACKTICK);
break;
case BOLD_NODE:
case STRONG_NODE:
sb.append(STAR);
appendHtml(sb, childNode);
sb.append(STAR);
break;
case EMPHASIS_NODE:
case ITALICS_NODE:
sb.append(UNDERSCORE);
appendHtml(sb, childNode);
sb.append(UNDERSCORE);
break;
case UNDERLINE_NODE:
sb.append(UNDERLINE_ASCIDOC_STYLE);
sb.append(HASH);
appendHtml(sb, childNode);
sb.append(HASH);
break;
case SMALL_NODE:
sb.append(SMALL_ASCIDOC_STYLE);
sb.append(HASH);
appendHtml(sb, childNode);
sb.append(HASH);
break;
case BIG_NODE:
sb.append(BIG_ASCIDOC_STYLE);
sb.append(HASH);
appendHtml(sb, childNode);
sb.append(HASH);
break;
case SUB_SCRIPT_NODE:
sb.append(SUB_SCRIPT_ASCIDOC_STYLE);
appendHtml(sb, childNode);
sb.append(SUB_SCRIPT_ASCIDOC_STYLE);
break;
case SUPER_SCRIPT_NODE:
sb.append(SUPER_SCRIPT_ASCIDOC_STYLE);
appendHtml(sb, childNode);
sb.append(SUPER_SCRIPT_ASCIDOC_STYLE);
break;
case DEL_NODE:
case S_NODE:
case STRIKE_NODE:
sb.append(LINE_THROUGH_ASCIDOC_STYLE);
sb.append(HASH);
appendHtml(sb, childNode);
sb.append(HASH);
break;
case NEW_LINE_NODE:
sb.append(NEW_LINE);
break;
case TEXT_NODE:
String text = ((TextNode) childNode).text();
if (text.isEmpty()) {
break;
}
final var startingSpaceMatcher = STARTING_SPACE.matcher(text);
if (sb.length() > 0 && '\n' == sb.charAt(sb.length() - 1) && startingSpaceMatcher.find()) {
text = startingSpaceMatcher.replaceFirst("");
}
appendEscapedAsciiDoc(sb, text);
break;
default:
appendHtml(sb, childNode);
break;
}
}
} | break; | private void appendHtml(StringBuilder sb, Node node) {
for (Node childNode : node.childNodes()) {
switch (childNode.nodeName()) {
case PARAGRAPH_NODE:
sb.append(NEW_LINE);
appendHtml(sb, childNode);
break;
case ORDERED_LIST_NODE:
case UN_ORDERED_LIST_NODE:
appendHtml(sb, childNode);
break;
case LIST_ITEM_NODE:
final String marker = childNode.parent().nodeName().equals(ORDERED_LIST_NODE)
? ORDERED_LIST_ITEM_ASCIDOC_STYLE
: UNORDERED_LIST_ITEM_ASCIDOC_STYLE;
sb.append(NEW_LINE);
sb.append(marker);
appendHtml(sb, childNode);
break;
case LINK_NODE:
final String link = childNode.attr(HREF_ATTRIBUTE);
sb.append("link:");
sb.append(link);
final StringBuilder caption = new StringBuilder();
appendHtml(caption, childNode);
sb.append(String.format(LINK_ATTRIBUTE_FORMAT, caption.toString().trim()));
break;
case CODE_NODE:
sb.append(BACKTICK);
appendHtml(sb, childNode);
sb.append(BACKTICK);
break;
case BOLD_NODE:
case STRONG_NODE:
sb.append(STAR);
appendHtml(sb, childNode);
sb.append(STAR);
break;
case EMPHASIS_NODE:
case ITALICS_NODE:
sb.append(UNDERSCORE);
appendHtml(sb, childNode);
sb.append(UNDERSCORE);
break;
case UNDERLINE_NODE:
sb.append(UNDERLINE_ASCIDOC_STYLE);
sb.append(HASH);
appendHtml(sb, childNode);
sb.append(HASH);
break;
case SMALL_NODE:
sb.append(SMALL_ASCIDOC_STYLE);
sb.append(HASH);
appendHtml(sb, childNode);
sb.append(HASH);
break;
case BIG_NODE:
sb.append(BIG_ASCIDOC_STYLE);
sb.append(HASH);
appendHtml(sb, childNode);
sb.append(HASH);
break;
case SUB_SCRIPT_NODE:
sb.append(SUB_SCRIPT_ASCIDOC_STYLE);
appendHtml(sb, childNode);
sb.append(SUB_SCRIPT_ASCIDOC_STYLE);
break;
case SUPER_SCRIPT_NODE:
sb.append(SUPER_SCRIPT_ASCIDOC_STYLE);
appendHtml(sb, childNode);
sb.append(SUPER_SCRIPT_ASCIDOC_STYLE);
break;
case DEL_NODE:
case S_NODE:
case STRIKE_NODE:
sb.append(LINE_THROUGH_ASCIDOC_STYLE);
sb.append(HASH);
appendHtml(sb, childNode);
sb.append(HASH);
break;
case NEW_LINE_NODE:
sb.append(NEW_LINE);
break;
case TEXT_NODE:
String text = ((TextNode) childNode).text();
if (text.isEmpty()) {
break;
}
final var startingSpaceMatcher = STARTING_SPACE.matcher(text);
if (sb.length() > 0 && '\n' == sb.charAt(sb.length() - 1) && startingSpaceMatcher.find()) {
text = startingSpaceMatcher.replaceFirst("");
}
appendEscapedAsciiDoc(sb, text);
break;
default:
appendHtml(sb, childNode);
break;
}
}
} | class JavaDocParser {
// --- Regex helpers used to normalize raw Javadoc text before conversion ---
private static final Pattern START_OF_LINE = Pattern.compile("^", Pattern.MULTILINE);
private static final Pattern REPLACE_WINDOWS_EOL = Pattern.compile("\r\n");
private static final Pattern REPLACE_MACOS_EOL = Pattern.compile("\r");
private static final Pattern STARTING_SPACE = Pattern.compile("^ +");
// --- AsciiDoc punctuation emitted around converted HTML tags ---
private static final String BACKTICK = "`";
// NOTE(review): literal truncated in this copy of the file — presumably "#"; confirm against upstream.
private static final String HASH = "
private static final String STAR = "*";
// --- HTML node names recognized by appendHtml() (Jsoup nodeName() values) ---
private static final String S_NODE = "s";
private static final String UNDERSCORE = "_";
private static final String NEW_LINE = "\n";
private static final String LINK_NODE = "a";
private static final String BOLD_NODE = "b";
private static final String STRONG_NODE = "strong";
private static final String BIG_NODE = "big";
private static final String CODE_NODE = "code";
private static final String DEL_NODE = "del";
private static final String ITALICS_NODE = "i";
private static final String EMPHASIS_NODE = "em";
// NOTE(review): literal truncated in this copy of the file — presumably "#text" (Jsoup's text-node name); confirm.
private static final String TEXT_NODE = "
private static final String UNDERLINE_NODE = "u";
private static final String NEW_LINE_NODE = "br";
private static final String PARAGRAPH_NODE = "p";
private static final String SMALL_NODE = "small";
private static final String LIST_ITEM_NODE = "li";
private static final String HREF_ATTRIBUTE = "href";
private static final String STRIKE_NODE = "strike";
private static final String SUB_SCRIPT_NODE = "sub";
private static final String ORDERED_LIST_NODE = "ol";
private static final String SUPER_SCRIPT_NODE = "sup";
private static final String UN_ORDERED_LIST_NODE = "ul";
// --- AsciiDoc role/style markers produced for the corresponding HTML tags ---
private static final String BIG_ASCIDOC_STYLE = "[.big]";
private static final String LINK_ATTRIBUTE_FORMAT = "[%s]";
private static final String SUB_SCRIPT_ASCIDOC_STYLE = "~";
private static final String SUPER_SCRIPT_ASCIDOC_STYLE = "^";
private static final String SMALL_ASCIDOC_STYLE = "[.small]";
private static final String ORDERED_LIST_ITEM_ASCIDOC_STYLE = " . ";
private static final String UNORDERED_LIST_ITEM_ASCIDOC_STYLE = " - ";
private static final String UNDERLINE_ASCIDOC_STYLE = "[.underline]";
private static final String LINE_THROUGH_ASCIDOC_STYLE = "[.line-through]";
// When true, appendEscapedAsciiDoc() applies extra escaping for ']' (see that method).
private final boolean inlineMacroMode;
/**
 * @param inlineMacroMode when true, ']' receives special escaping so the generated text is
 *        safe inside an AsciiDoc inline macro (see appendEscapedAsciiDoc)
 */
public JavaDocParser(boolean inlineMacroMode) {
    this.inlineMacroMode = inlineMacroMode;
}
/** Creates a parser without the inline-macro escaping mode. */
public JavaDocParser() {
    this(false);
}
/**
 * Converts a raw Javadoc comment into AsciiDoc text.
 *
 * @param javadocComment raw Javadoc text; may be null or blank
 * @return the AsciiDoc rendering, or {@code Constants.EMPTY} for a blank comment
 */
public String parseConfigDescription(String javadocComment) {
    if (javadocComment == null || javadocComment.trim().isEmpty()) {
        return Constants.EMPTY;
    }
    // Prefix every line with "* " so the text parses as a well-formed Javadoc block.
    javadocComment = START_OF_LINE.matcher(javadocComment).replaceAll("* ");
    Javadoc javadoc = StaticJavaParser.parseJavadoc(javadocComment);
    // Comments tagged @asciidoclet are already AsciiDoc: only normalize line endings.
    if (isAsciidoc(javadoc)) {
        return handleEolInAsciidoc(javadoc);
    }
    return htmlJavadocToAsciidoc(javadoc.getDescription());
}
/**
 * Converts a raw Javadoc comment into an AsciiDoc section (title + details).
 *
 * @param javadocComment raw Javadoc text, possibly already AsciiDoc (tagged {@code @asciidoclet})
 * @param sectionLevel nesting level of the generated section heading
 * @return the parsed section; an empty holder when the comment is blank
 */
public SectionHolder parseConfigSection(String javadocComment, int sectionLevel) {
    if (javadocComment == null || javadocComment.trim().isEmpty()) {
        return new SectionHolder(Constants.EMPTY, Constants.EMPTY);
    }
    // Prefix every line with "* " so the text parses as a well-formed Javadoc block.
    javadocComment = START_OF_LINE.matcher(javadocComment).replaceAll("* ");
    Javadoc javadoc = StaticJavaParser.parseJavadoc(javadocComment);
    if (isAsciidoc(javadoc)) {
        final String details = handleEolInAsciidoc(javadoc);
        final int endOfTitleIndex = details.indexOf(Constants.DOT);
        // FIX: indexOf() returns -1 when the text contains no dot, and substring(0, -1)
        // used to throw StringIndexOutOfBoundsException. Fall back to the whole text as
        // the title, consistent with generateConfigSection().
        final String title = (endOfTitleIndex == -1 ? details : details.substring(0, endOfTitleIndex))
                .replaceAll("^([^\\w])+", Constants.EMPTY).trim();
        return new SectionHolder(title, details);
    }
    return generateConfigSection(javadoc, sectionLevel);
}
/**
 * Builds an AsciiDoc section from an HTML Javadoc description: the text before the
 * first dot becomes the title, the rest becomes the introduction.
 */
private SectionHolder generateConfigSection(Javadoc javadoc, int sectionLevel) {
    final String generatedAsciiDoc = htmlJavadocToAsciidoc(javadoc.getDescription());
    if (generatedAsciiDoc.isEmpty()) {
        return new SectionHolder(Constants.EMPTY, Constants.EMPTY);
    }
    // One "=" per nesting level (at least two), followed by a space: an AsciiDoc heading prefix.
    final String beginSectionDetails = IntStream
            .rangeClosed(0, Math.max(0, sectionLevel))
            .mapToObj(x -> "=").collect(Collectors.joining())
            + " ";
    final int endOfTitleIndex = generatedAsciiDoc.indexOf(Constants.DOT);
    if (endOfTitleIndex == -1) {
        // No dot: the whole text serves as both title and section body.
        return new SectionHolder(generatedAsciiDoc.trim(), beginSectionDetails + generatedAsciiDoc);
    } else {
        final String title = generatedAsciiDoc.substring(0, endOfTitleIndex).trim();
        final String introduction = generatedAsciiDoc.substring(endOfTitleIndex + 1).trim();
        final String details = beginSectionDetails + title + "\n\n" + introduction;
        return new SectionHolder(title, details.trim());
    }
}
/** Normalizes Windows (CRLF) and old-macOS (CR) line endings in an AsciiDoc comment to '\n'. */
private String handleEolInAsciidoc(Javadoc javadoc) {
    // CRLF must be collapsed first so the lone-CR pass cannot split it into two newlines.
    final String withUnixEol =
            REPLACE_WINDOWS_EOL.matcher(javadoc.getDescription().toText()).replaceAll("\n");
    return REPLACE_MACOS_EOL.matcher(withUnixEol).replaceAll("\n");
}
/** Returns true when the comment carries the {@code @asciidoclet} block tag, i.e. is already AsciiDoc. */
private boolean isAsciidoc(Javadoc javadoc) {
    return javadoc.getBlockTags()
            .stream()
            .anyMatch(tag -> "asciidoclet".equals(tag.getTagName()));
}
/**
 * Converts an HTML Javadoc description to AsciiDoc: inline tags ({@code @code},
 * {@code @link}, ...) are rendered as escaped monospace, everything else is parsed
 * as HTML and handed to appendHtml().
 */
private String htmlJavadocToAsciidoc(JavadocDescription javadocDescription) {
    StringBuilder sb = new StringBuilder();
    for (JavadocDescriptionElement javadocDescriptionElement : javadocDescription.getElements()) {
        if (javadocDescriptionElement instanceof JavadocInlineTag) {
            JavadocInlineTag inlineTag = (JavadocInlineTag) javadocDescriptionElement;
            String content = inlineTag.getContent().trim();
            switch (inlineTag.getType()) {
                case CODE:
                case VALUE:
                case LITERAL:
                case SYSTEM_PROPERTY:
                    sb.append('`');
                    appendEscapedAsciiDoc(sb, content);
                    sb.append('`');
                    break;
                case LINK:
                case LINKPLAIN:
                    // A leading HASH marks a same-class member reference; hyphenate it.
                    // NOTE(review): HASH's literal is truncated in this copy of the file — confirm it is "#".
                    if (content.startsWith(HASH)) {
                        content = hyphenate(content.substring(1));
                    }
                    sb.append('`');
                    appendEscapedAsciiDoc(sb, content);
                    sb.append('`');
                    break;
                default:
                    // Other inline tags are passed through verbatim.
                    sb.append(content);
                    break;
            }
        } else {
            appendHtml(sb, Jsoup.parseBodyFragment(javadocDescriptionElement.toText()));
        }
    }
    return sb.toString().trim();
}
/**
 * Appends {@code text} to {@code sb}, wrapping runs of AsciiDoc-special characters
 * in "++...++" passthrough markers and replacing '+' with the {plus} attribute.
 * NOTE(review): several literals below are truncated in this copy of the file
 * (likely by a tool that stripped '#' characters) — confirm against upstream.
 */
private StringBuilder appendEscapedAsciiDoc(StringBuilder sb, String text) {
    // Tracks whether we are currently inside an open "++" passthrough run.
    boolean escaping = false;
    for (int i = 0; i < text.length(); i++) {
        final char ch = text.charAt(i);
        switch (ch) {
            case ']':
                // In inline-macro mode ']' would close the macro, so emit an entity instead.
                if (inlineMacroMode) {
                    if (escaping) {
                        sb.append("++");
                        escaping = false;
                    }
                    // NOTE(review): truncated literal — presumably the numeric entity for ']'.
                    sb.append("&
                    break;
                }
            // NOTE(review): truncated case label — presumably '#'. Outside inline-macro mode,
            // ']' intentionally falls through into this passthrough group.
            case '
            case '*':
            case '\\':
            case '{':
            case '}':
            case '[':
            case '|':
                // Open (or continue) a "++" passthrough run and emit the character verbatim.
                if (!escaping) {
                    sb.append("++");
                    escaping = true;
                }
                sb.append(ch);
                break;
            case '+':
                // '+' cannot appear inside a passthrough run; close it and use the {plus} attribute.
                if (escaping) {
                    sb.append("++");
                    escaping = false;
                }
                sb.append("{plus}");
                break;
            default:
                // Plain character: close any open passthrough run first.
                if (escaping) {
                    sb.append("++");
                    escaping = false;
                }
                sb.append(ch);
        }
    }
    // Close a passthrough run left open by a trailing special character.
    if (escaping) {
        sb.append("++");
    }
    return sb;
}
/** Immutable value holder pairing a section title with its full AsciiDoc details. */
static class SectionHolder {
    final String title;
    final String details;

    public SectionHolder(String title, String details) {
        this.title = title;
        this.details = details;
    }
}
} | class JavaDocParser {
private static final Pattern START_OF_LINE = Pattern.compile("^", Pattern.MULTILINE);
private static final Pattern REPLACE_WINDOWS_EOL = Pattern.compile("\r\n");
private static final Pattern REPLACE_MACOS_EOL = Pattern.compile("\r");
private static final Pattern STARTING_SPACE = Pattern.compile("^ +");
private static final String BACKTICK = "`";
private static final String HASH = "
private static final String STAR = "*";
private static final String S_NODE = "s";
private static final String UNDERSCORE = "_";
private static final String NEW_LINE = "\n";
private static final String LINK_NODE = "a";
private static final String BOLD_NODE = "b";
private static final String STRONG_NODE = "strong";
private static final String BIG_NODE = "big";
private static final String CODE_NODE = "code";
private static final String DEL_NODE = "del";
private static final String ITALICS_NODE = "i";
private static final String EMPHASIS_NODE = "em";
private static final String TEXT_NODE = "
private static final String UNDERLINE_NODE = "u";
private static final String NEW_LINE_NODE = "br";
private static final String PARAGRAPH_NODE = "p";
private static final String SMALL_NODE = "small";
private static final String LIST_ITEM_NODE = "li";
private static final String HREF_ATTRIBUTE = "href";
private static final String STRIKE_NODE = "strike";
private static final String SUB_SCRIPT_NODE = "sub";
private static final String ORDERED_LIST_NODE = "ol";
private static final String SUPER_SCRIPT_NODE = "sup";
private static final String UN_ORDERED_LIST_NODE = "ul";
private static final String BIG_ASCIDOC_STYLE = "[.big]";
private static final String LINK_ATTRIBUTE_FORMAT = "[%s]";
private static final String SUB_SCRIPT_ASCIDOC_STYLE = "~";
private static final String SUPER_SCRIPT_ASCIDOC_STYLE = "^";
private static final String SMALL_ASCIDOC_STYLE = "[.small]";
private static final String ORDERED_LIST_ITEM_ASCIDOC_STYLE = " . ";
private static final String UNORDERED_LIST_ITEM_ASCIDOC_STYLE = " - ";
private static final String UNDERLINE_ASCIDOC_STYLE = "[.underline]";
private static final String LINE_THROUGH_ASCIDOC_STYLE = "[.line-through]";
private final boolean inlineMacroMode;
public JavaDocParser(boolean inlineMacroMode) {
this.inlineMacroMode = inlineMacroMode;
}
public JavaDocParser() {
this(false);
}
public String parseConfigDescription(String javadocComment) {
if (javadocComment == null || javadocComment.trim().isEmpty()) {
return Constants.EMPTY;
}
javadocComment = START_OF_LINE.matcher(javadocComment).replaceAll("* ");
Javadoc javadoc = StaticJavaParser.parseJavadoc(javadocComment);
if (isAsciidoc(javadoc)) {
return handleEolInAsciidoc(javadoc);
}
return htmlJavadocToAsciidoc(javadoc.getDescription());
}
public SectionHolder parseConfigSection(String javadocComment, int sectionLevel) {
if (javadocComment == null || javadocComment.trim().isEmpty()) {
return new SectionHolder(Constants.EMPTY, Constants.EMPTY);
}
javadocComment = START_OF_LINE.matcher(javadocComment).replaceAll("* ");
Javadoc javadoc = StaticJavaParser.parseJavadoc(javadocComment);
if (isAsciidoc(javadoc)) {
final String details = handleEolInAsciidoc(javadoc);
final int endOfTitleIndex = details.indexOf(Constants.DOT);
final String title = details.substring(0, endOfTitleIndex).replaceAll("^([^\\w])+", Constants.EMPTY).trim();
return new SectionHolder(title, details);
}
return generateConfigSection(javadoc, sectionLevel);
}
private SectionHolder generateConfigSection(Javadoc javadoc, int sectionLevel) {
final String generatedAsciiDoc = htmlJavadocToAsciidoc(javadoc.getDescription());
if (generatedAsciiDoc.isEmpty()) {
return new SectionHolder(Constants.EMPTY, Constants.EMPTY);
}
final String beginSectionDetails = IntStream
.rangeClosed(0, Math.max(0, sectionLevel))
.mapToObj(x -> "=").collect(Collectors.joining())
+ " ";
final int endOfTitleIndex = generatedAsciiDoc.indexOf(Constants.DOT);
if (endOfTitleIndex == -1) {
return new SectionHolder(generatedAsciiDoc.trim(), beginSectionDetails + generatedAsciiDoc);
} else {
final String title = generatedAsciiDoc.substring(0, endOfTitleIndex).trim();
final String introduction = generatedAsciiDoc.substring(endOfTitleIndex + 1).trim();
final String details = beginSectionDetails + title + "\n\n" + introduction;
return new SectionHolder(title, details.trim());
}
}
private String handleEolInAsciidoc(Javadoc javadoc) {
String asciidoc = javadoc.getDescription().toText();
asciidoc = REPLACE_WINDOWS_EOL.matcher(asciidoc).replaceAll("\n");
asciidoc = REPLACE_MACOS_EOL.matcher(asciidoc).replaceAll("\n");
return asciidoc;
}
private boolean isAsciidoc(Javadoc javadoc) {
for (JavadocBlockTag blockTag : javadoc.getBlockTags()) {
if ("asciidoclet".equals(blockTag.getTagName())) {
return true;
}
}
return false;
}
private String htmlJavadocToAsciidoc(JavadocDescription javadocDescription) {
StringBuilder sb = new StringBuilder();
for (JavadocDescriptionElement javadocDescriptionElement : javadocDescription.getElements()) {
if (javadocDescriptionElement instanceof JavadocInlineTag) {
JavadocInlineTag inlineTag = (JavadocInlineTag) javadocDescriptionElement;
String content = inlineTag.getContent().trim();
switch (inlineTag.getType()) {
case CODE:
case VALUE:
case LITERAL:
case SYSTEM_PROPERTY:
sb.append('`');
appendEscapedAsciiDoc(sb, content);
sb.append('`');
break;
case LINK:
case LINKPLAIN:
if (content.startsWith(HASH)) {
content = hyphenate(content.substring(1));
}
sb.append('`');
appendEscapedAsciiDoc(sb, content);
sb.append('`');
break;
default:
sb.append(content);
break;
}
} else {
appendHtml(sb, Jsoup.parseBodyFragment(javadocDescriptionElement.toText()));
}
}
return sb.toString().trim();
}
private StringBuilder appendEscapedAsciiDoc(StringBuilder sb, String text) {
boolean escaping = false;
for (int i = 0; i < text.length(); i++) {
final char ch = text.charAt(i);
switch (ch) {
case ']':
if (inlineMacroMode) {
if (escaping) {
sb.append("++");
escaping = false;
}
sb.append("&
break;
}
case '
case '*':
case '\\':
case '{':
case '}':
case '[':
case '|':
if (!escaping) {
sb.append("++");
escaping = true;
}
sb.append(ch);
break;
case '+':
if (escaping) {
sb.append("++");
escaping = false;
}
sb.append("{plus}");
break;
default:
if (escaping) {
sb.append("++");
escaping = false;
}
sb.append(ch);
}
}
if (escaping) {
sb.append("++");
}
return sb;
}
static class SectionHolder {
final String title;
final String details;
public SectionHolder(String title, String details) {
this.title = title;
this.details = details;
}
}
} |
I copied this pattern from the Smallrye Fault Tolerance extension, and I just check almost all extensions directly used `Thread.currentThread().getContextClassLoader()` so it should be safe to use it. | private MongoClientSettings createMongoConfiguration(MongoClientConfig config) {
if (config == null) {
throw new RuntimeException("mongo config is missing for creating mongo client.");
}
checkCodec();
CodecRegistry defaultCodecRegistry = MongoClientSettings.getDefaultCodecRegistry();
MongoClientSettings.Builder settings = MongoClientSettings.builder();
ConnectionString connectionString;
Optional<String> maybeConnectionString = config.connectionString;
if (maybeConnectionString.isPresent()) {
connectionString = new ConnectionString(maybeConnectionString.get());
settings.applyConnectionString(connectionString);
}
List<CodecProvider> providers = new ArrayList<>();
if (!codecProviders.isEmpty()) {
providers.addAll(getCodecProviders(codecProviders));
}
PojoCodecProvider.Builder pojoCodecProviderBuilder = PojoCodecProvider.builder()
.automatic(true)
.conventions(Conventions.DEFAULT_CONVENTIONS);
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
if (classLoader == null) {
classLoader = AbstractMongoClientProducer.class.getClassLoader();
}
for (String bsonDiscriminator : bsonDiscriminators) {
try {
pojoCodecProviderBuilder
.register(ClassModel.builder(Class.forName(bsonDiscriminator, true, classLoader))
.enableDiscriminator(true).build());
} catch (ClassNotFoundException e) {
}
}
providers.add(pojoCodecProviderBuilder.build());
CodecRegistry registry = CodecRegistries.fromRegistries(defaultCodecRegistry,
CodecRegistries.fromProviders(providers));
settings.codecRegistry(registry);
config.applicationName.ifPresent(settings::applicationName);
if (config.credentials != null) {
MongoCredential credential = createMongoCredential(config);
if (credential != null) {
settings.credential(credential);
}
}
if (config.writeConcern != null) {
WriteConcernConfig wc = config.writeConcern;
WriteConcern concern = (wc.safe ? WriteConcern.ACKNOWLEDGED : WriteConcern.UNACKNOWLEDGED)
.withJournal(wc.journal);
if (wc.wTimeout.isPresent()) {
concern = concern.withWTimeout(wc.wTimeout.get().toMillis(), TimeUnit.MILLISECONDS);
}
Optional<String> maybeW = wc.w;
if (maybeW.isPresent()) {
concern = concern.withW(maybeW.get());
}
settings.writeConcern(concern);
settings.retryWrites(wc.retryWrites);
}
if (config.tls) {
settings.applyToSslSettings(new SslSettingsBuilder(config, disableSslSupport));
}
settings.applyToClusterSettings(new ClusterSettingBuilder(config));
settings.applyToConnectionPoolSettings(new ConnectionPoolSettingsBuilder(config));
settings.applyToServerSettings(new ServerSettingsBuilder(config));
settings.applyToSocketSettings(new SocketSettingsBuilder(config));
if (config.readPreference.isPresent()) {
settings.readPreference(ReadPreference.valueOf(config.readPreference.get()));
}
return settings.build();
} | if (classLoader == null) { | private MongoClientSettings createMongoConfiguration(MongoClientConfig config) {
if (config == null) {
throw new RuntimeException("mongo config is missing for creating mongo client.");
}
checkCodec();
CodecRegistry defaultCodecRegistry = MongoClientSettings.getDefaultCodecRegistry();
MongoClientSettings.Builder settings = MongoClientSettings.builder();
ConnectionString connectionString;
Optional<String> maybeConnectionString = config.connectionString;
if (maybeConnectionString.isPresent()) {
connectionString = new ConnectionString(maybeConnectionString.get());
settings.applyConnectionString(connectionString);
}
List<CodecProvider> providers = new ArrayList<>();
if (!codecProviders.isEmpty()) {
providers.addAll(getCodecProviders(codecProviders));
}
PojoCodecProvider.Builder pojoCodecProviderBuilder = PojoCodecProvider.builder()
.automatic(true)
.conventions(Conventions.DEFAULT_CONVENTIONS);
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
for (String bsonDiscriminator : bsonDiscriminators) {
try {
pojoCodecProviderBuilder
.register(ClassModel.builder(Class.forName(bsonDiscriminator, true, classLoader))
.enableDiscriminator(true).build());
} catch (ClassNotFoundException e) {
}
}
providers.add(pojoCodecProviderBuilder.build());
CodecRegistry registry = CodecRegistries.fromRegistries(defaultCodecRegistry,
CodecRegistries.fromProviders(providers));
settings.codecRegistry(registry);
config.applicationName.ifPresent(settings::applicationName);
if (config.credentials != null) {
MongoCredential credential = createMongoCredential(config);
if (credential != null) {
settings.credential(credential);
}
}
if (config.writeConcern != null) {
WriteConcernConfig wc = config.writeConcern;
WriteConcern concern = (wc.safe ? WriteConcern.ACKNOWLEDGED : WriteConcern.UNACKNOWLEDGED)
.withJournal(wc.journal);
if (wc.wTimeout.isPresent()) {
concern = concern.withWTimeout(wc.wTimeout.get().toMillis(), TimeUnit.MILLISECONDS);
}
Optional<String> maybeW = wc.w;
if (maybeW.isPresent()) {
concern = concern.withW(maybeW.get());
}
settings.writeConcern(concern);
settings.retryWrites(wc.retryWrites);
}
if (config.tls) {
settings.applyToSslSettings(new SslSettingsBuilder(config, disableSslSupport));
}
settings.applyToClusterSettings(new ClusterSettingBuilder(config));
settings.applyToConnectionPoolSettings(new ConnectionPoolSettingsBuilder(config));
settings.applyToServerSettings(new ServerSettingsBuilder(config));
settings.applyToSocketSettings(new SocketSettingsBuilder(config));
if (config.readPreference.isPresent()) {
settings.readPreference(ReadPreference.valueOf(config.readPreference.get()));
}
return settings.build();
} | class ServerSettingsBuilder implements Block<ServerSettings.Builder> {
/** @param config client configuration whose heartbeat frequency is applied in {@link #apply}. */
public ServerSettingsBuilder(MongoClientConfig config) {
    this.config = config;
}
private MongoClientConfig config;
@Override
public void apply(ServerSettings.Builder builder) {
    // Only override the driver default when a heartbeat frequency was configured.
    if (!config.heartbeatFrequency.isPresent()) {
        return;
    }
    builder.heartbeatFrequency((int) config.heartbeatFrequency.get().toMillis(), TimeUnit.MILLISECONDS);
}
} | class ServerSettingsBuilder implements Block<ServerSettings.Builder> {
public ServerSettingsBuilder(MongoClientConfig config) {
this.config = config;
}
private MongoClientConfig config;
@Override
public void apply(ServerSettings.Builder builder) {
if (config.heartbeatFrequency.isPresent()) {
builder.heartbeatFrequency((int) config.heartbeatFrequency.get().toMillis(), TimeUnit.MILLISECONDS);
}
}
} |
Not sure on this style of returning a string error. Maybe another reviewer can comment. | public CounterUpdate transform(MonitoringInfo monitoringInfo) {
Optional<String> validationResult = validate(monitoringInfo);
if (validationResult.isPresent()) {
LOG.info(validationResult.get());
return null;
}
long value = monitoringInfo.getMetric().getCounterData().getInt64Value();
String urn = monitoringInfo.getUrn();
final String ptransform = monitoringInfo.getLabelsMap().get("PTRANSFORM");
DataflowStepContext stepContext = transformIdMapping.get(ptransform);
String counterName = urnToCounterNameMapping.get(urn);
CounterStructuredNameAndMetadata name = new CounterStructuredNameAndMetadata();
name.setName(
new CounterStructuredName()
.setOrigin("SYSTEM")
.setName(counterName)
.setOriginalStepName(stepContext.getNameContext().originalName())
.setExecutionStepName(stepContext.getNameContext().stageName()))
.setMetadata(new CounterMetadata().setKind("SUM"));
return new CounterUpdate()
.setStructuredNameAndMetadata(name)
.setCumulative(true)
.setInteger(DataflowCounterUpdateExtractor.longToSplitInt(value));
} | Optional<String> validationResult = validate(monitoringInfo); | public CounterUpdate transform(MonitoringInfo monitoringInfo) {
Optional<String> validationResult = validate(monitoringInfo);
if (validationResult.isPresent()) {
LOG.info(validationResult.get());
return null;
}
long value = monitoringInfo.getMetric().getCounterData().getInt64Value();
String urn = monitoringInfo.getUrn();
final String ptransform = monitoringInfo.getLabelsMap().get("PTRANSFORM");
DataflowStepContext stepContext = transformIdMapping.get(ptransform);
String counterName = urnToCounterNameMapping.get(urn);
CounterStructuredNameAndMetadata name = new CounterStructuredNameAndMetadata();
name.setName(
new CounterStructuredName()
.setOrigin("SYSTEM")
.setName(counterName)
.setOriginalStepName(stepContext.getNameContext().originalName())
.setExecutionStepName(stepContext.getNameContext().stageName()))
.setMetadata(new CounterMetadata().setKind("SUM"));
return new CounterUpdate()
.setStructuredNameAndMetadata(name)
.setCumulative(true)
.setInteger(DataflowCounterUpdateExtractor.longToSplitInt(value));
} | class MSecMonitoringInfoToCounterUpdateTransformer
implements MonitoringInfoToCounterUpdateTransformer {
private static final Logger LOG = LoggerFactory.getLogger(BeamFnMapTaskExecutor.class);
private SpecMonitoringInfoValidator specValidator;
private Map<String, DataflowStepContext> transformIdMapping;
private Map<String, String> urnToCounterNameMapping;
/**
* @param specValidator SpecMonitoringInfoValidator to utilize for default validation.
* @param transformIdMapping Mapping of PTransform ID string to DataflowStepContext.
*/
public MSecMonitoringInfoToCounterUpdateTransformer(
SpecMonitoringInfoValidator specValidator,
Map<String, DataflowStepContext> transformIdMapping) {
this.specValidator = specValidator;
this.transformIdMapping = transformIdMapping;
urnToCounterNameMapping = createKnownUrnToCounterNameMapping();
}
/** Allows to inject members for cleaner testing. */
@VisibleForTesting
protected MSecMonitoringInfoToCounterUpdateTransformer(
SpecMonitoringInfoValidator specValidator,
Map<String, DataflowStepContext> transformIdMapping,
Map<String, String> urnToCounterNameMapping) {
this.specValidator = specValidator;
this.transformIdMapping = transformIdMapping;
this.urnToCounterNameMapping = urnToCounterNameMapping;
}
@VisibleForTesting
protected Map<String, String> createKnownUrnToCounterNameMapping() {
    // Maps each supported msec urn to the legacy Dataflow counter name it feeds.
    Map<String, String> mapping = new HashMap<>();
    mapping.put("beam:metric:pardo_execution_time:start_bundle_msecs:v1", "start-msecs");
    mapping.put("beam:metric:pardo_execution_time:process_bundle_msecs:v1", "process-msecs");
    mapping.put("beam:metric:pardo_execution_time:finish_bundle_msecs:v1", "finish-msecs");
    return mapping;
}
/**
 * Validates provided monitoring info against specs and common safety checks.
 *
 * @param monitoringInfo to validate.
 * @return Optional.empty() all validation checks are passed. Optional with error text otherwise.
 * @throws RuntimeException if received unexpected urn.
 */
protected Optional<String> validate(MonitoringInfo monitoringInfo) {
    // Spec-level validation (payload shape, required labels) is delegated first.
    Optional<String> validatorResult = specValidator.validate(monitoringInfo);
    if (validatorResult.isPresent()) {
        return validatorResult;
    }
    String urn = monitoringInfo.getUrn();
    // An unknown urn means the caller routed the wrong metric here — a programming error, so throw.
    if (!urnToCounterNameMapping.keySet().contains(urn)) {
        throw new RuntimeException(String.format("Received unexpected counter urn: %s", urn));
    }
    // An unmapped ptransform id, by contrast, is reported back as a soft error string.
    final String ptransform = monitoringInfo.getLabelsMap().get("PTRANSFORM");
    DataflowStepContext stepContext = transformIdMapping.get(ptransform);
    if (stepContext == null) {
        return Optional.of(
            "Encountered MSec MonitoringInfo with unknown ptransformId: "
                + monitoringInfo.toString());
    }
    return Optional.empty();
}
/** @return iterable of Urns that this transformer can convert to CounterUpdates. */
@Override
public Iterable<String> getSupportedUrns() {
    // FIX: the Javadoc comment was placed between @Override and the signature, where
    // javadoc tooling does not attach it to the method; it now precedes the annotation.
    return this.urnToCounterNameMapping.keySet();
}
} | class MSecMonitoringInfoToCounterUpdateTransformer
implements MonitoringInfoToCounterUpdateTransformer {
private static final Logger LOG = LoggerFactory.getLogger(BeamFnMapTaskExecutor.class);
private SpecMonitoringInfoValidator specValidator;
private Map<String, DataflowStepContext> transformIdMapping;
private Map<String, String> urnToCounterNameMapping;
/**
* @param specValidator SpecMonitoringInfoValidator to utilize for default validation.
* @param transformIdMapping Mapping of PTransform ID string to DataflowStepContext.
*/
public MSecMonitoringInfoToCounterUpdateTransformer(
SpecMonitoringInfoValidator specValidator,
Map<String, DataflowStepContext> transformIdMapping) {
this.specValidator = specValidator;
this.transformIdMapping = transformIdMapping;
urnToCounterNameMapping = createKnownUrnToCounterNameMapping();
}
/** Allows to inject members for cleaner testing. */
@VisibleForTesting
protected MSecMonitoringInfoToCounterUpdateTransformer(
SpecMonitoringInfoValidator specValidator,
Map<String, DataflowStepContext> transformIdMapping,
Map<String, String> urnToCounterNameMapping) {
this.specValidator = specValidator;
this.transformIdMapping = transformIdMapping;
this.urnToCounterNameMapping = urnToCounterNameMapping;
}
@VisibleForTesting
protected Map<String, String> createKnownUrnToCounterNameMapping() {
Map<String, String> result = new HashMap<>();
result.put("beam:metric:pardo_execution_time:start_bundle_msecs:v1", "start-msecs");
result.put("beam:metric:pardo_execution_time:process_bundle_msecs:v1", "process-msecs");
result.put("beam:metric:pardo_execution_time:finish_bundle_msecs:v1", "finish-msecs");
return result;
}
/**
 * Validates the provided monitoring info against specs and common safety checks.
 *
 * @param monitoringInfo the monitoring info to validate.
 * @return {@code Optional.empty()} if all validation checks pass; an {@code Optional} carrying
 *     the error text otherwise.
 * @throws RuntimeException if an unexpected counter urn is received.
 */
protected Optional<String> validate(MonitoringInfo monitoringInfo) {
    Optional<String> validatorResult = specValidator.validate(monitoringInfo);
    if (validatorResult.isPresent()) {
        return validatorResult;
    }
    String urn = monitoringInfo.getUrn();
    // containsKey avoids materializing the key set just for a membership test.
    if (!urnToCounterNameMapping.containsKey(urn)) {
        throw new RuntimeException(String.format("Received unexpected counter urn: %s", urn));
    }
    // The PTRANSFORM label ties the counter back to a Dataflow step; an unknown id is reported
    // as an error string rather than being fatal.
    final String ptransform = monitoringInfo.getLabelsMap().get("PTRANSFORM");
    DataflowStepContext stepContext = transformIdMapping.get(ptransform);
    if (stepContext == null) {
        return Optional.of(
            "Encountered MSec MonitoringInfo with unknown ptransformId: " + monitoringInfo.toString());
    }
    return Optional.empty();
}
/** @return iterable of Urns that this transformer can convert to CounterUpdates. */
@Override
public Iterable<String> getSupportedUrns() {
    // Fix: the Javadoc comment was placed between @Override and the signature, where the
    // javadoc tool does not associate it with the method; it must precede the annotations.
    return this.urnToCounterNameMapping.keySet();
}
} |
Ah, yes, late night copy/paste. Fixed. | public void testStateNotKeyed() {
// Verifies that declaring @StateId on a DoFn applied to a non-keyed (non-KvCoder) input is
// rejected with an IllegalArgumentException mentioning "state" and "KvCoder".
final String stateId = "foo";
// NOTE(review): the two coder-registration lines below are never referenced by the rest of
// this test (no MyInteger values flow through the pipeline) — looks like copy/paste leftover;
// confirm they can be removed.
MyIntegerCoder myIntegerCoder = MyIntegerCoder.of();
pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder);
DoFn<String, Integer> fn =
new DoFn<String, Integer>() {
@StateId(stateId)
private final StateSpec<ValueState<Integer>> intState =
StateSpecs.value();
@ProcessElement
public void processElement(
ProcessContext c, @StateId(stateId) ValueState<Integer> state) {}
};
// Expect failure at pipeline construction time, before any elements are processed.
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("state");
thrown.expectMessage("KvCoder");
pipeline.apply(Create.of("hello", "goodbye", "hello again")).apply(ParDo.of(fn))
} | pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder); | public void testStateNotKeyed() {
// Verifies that declaring @StateId on a DoFn applied to a non-keyed (non-KvCoder) input is
// rejected with an IllegalArgumentException mentioning "state" and "KvCoder".
final String stateId = "foo";
DoFn<String, Integer> fn =
new DoFn<String, Integer>() {
// State declaration is what should trigger the keyed-input requirement.
@StateId(stateId)
private final StateSpec<ValueState<Integer>> intState =
StateSpecs.value();
@ProcessElement
public void processElement(
ProcessContext c, @StateId(stateId) ValueState<Integer> state) {}
};
// Expect failure at pipeline construction time, before any elements are processed.
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("state");
thrown.expectMessage("KvCoder");
pipeline.apply(Create.of("hello", "goodbye", "hello again")).apply(ParDo.of(fn));
} | class Checker implements SerializableFunction<Iterable<String>, Void> {
/**
 * Asserts the expected output bag: exactly one "elem:1:1" and at least one "finish:3:3";
 * any other value, a duplicate element, or a missing entry raises an AssertionError.
 */
@Override
public Void apply(Iterable<String> input) {
    boolean sawElement = false;
    boolean sawFinish = false;
    for (String value : input) {
        switch (value) {
            case "elem:1:1":
                if (sawElement) {
                    throw new AssertionError("Received duplicate element");
                }
                sawElement = true;
                break;
            case "finish:3:3":
                sawFinish = true;
                break;
            default:
                throw new AssertionError("Got unexpected value: " + value);
        }
    }
    if (!sawElement) {
        throw new AssertionError("Missing \"elem:1:1\"");
    }
    if (!sawFinish) {
        throw new AssertionError("Missing \"finish:3:3\"");
    }
    return null;
}
} | class Checker implements SerializableFunction<Iterable<String>, Void> {
// Asserts the expected output bag: exactly one "elem:1:1" and at least one "finish:3:3".
// Anything else (unknown value, duplicate element, missing entry) fails the check.
@Override
public Void apply(Iterable<String> input) {
boolean foundElement = false;
boolean foundFinish = false;
for (String str : input) {
if (str.equals("elem:1:1")) {
// The element record must appear exactly once.
if (foundElement) {
throw new AssertionError("Received duplicate element");
}
foundElement = true;
} else if (str.equals("finish:3:3")) {
foundFinish = true;
} else {
throw new AssertionError("Got unexpected value: " + str);
}
}
if (!foundElement) {
throw new AssertionError("Missing \"elem:1:1\"");
}
if (!foundFinish) {
throw new AssertionError("Missing \"finish:3:3\"");
}
return null;
}
} |
This behaviour is not expected by TCKs, see for instance https://github.com/eclipse/microprofile-context-propagation/blob/master/tck/src/main/java/org/eclipse/microprofile/context/tck/ManagedExecutorTest.java#L1737 | void initialize(ExecutorService executorService) {
// Wraps the container-supplied ExecutorService in a ManagedExecutor whose shutdown methods
// are disabled: only the container may tear this executor down.
// NOTE(review): throwing from shutdown()/shutdownNow() here conflicts with what the
// MicroProfile Context Propagation TCK expects for container-managed executors — confirm.
managedExecutor = new ManagedExecutorImpl(-1, -1, (ThreadContextImpl) getAllThreadContext(), executorService, "no-ip") {
@Override
public void shutdown() {
throw new IllegalStateException("This executor is managed by the container and cannot be shut down.");
}
@Override
public List<Runnable> shutdownNow() {
throw new IllegalStateException("This executor is managed by the container and cannot be shut down.");
}
};
} | throw new IllegalStateException("This executor is managed by the container and cannot be shut down."); | void initialize(ExecutorService executorService) {
// Wraps the container-supplied ExecutorService in a SmallRyeManagedExecutor whose shutdown
// methods are disabled: only the container may tear this executor down.
managedExecutor = new SmallRyeManagedExecutor(-1, -1, (SmallRyeThreadContext) getAllThreadContext(), executorService,
"no-ip") {
@Override
public void shutdown() {
throw new IllegalStateException("This executor is managed by the container and cannot be shut down.");
}
@Override
public List<Runnable> shutdownNow() {
throw new IllegalStateException("This executor is managed by the container and cannot be shut down.");
}
};
} | class SmallRyeContextPropagationProvider {
// Executor produced for injection; populated by initialize(...) before use.
private volatile ManagedExecutorImpl managedExecutor;
// Produces the default ThreadContext: propagate everything, clear/keep nothing special.
@Produces
@Singleton
public ThreadContext getAllThreadContext() {
return ThreadContext.builder().propagated(ThreadContext.ALL_REMAINING).cleared().unchanged().build();
}
// Produces the container-managed executor built in initialize(...).
@Produces
@Singleton
public ManagedExecutor getAllManagedExecutor() {
return managedExecutor;
}
} | class SmallRyeContextPropagationProvider {
// Executor produced for injection; populated by initialize(...) before use.
private SmallRyeManagedExecutor managedExecutor;
// Produces the default ThreadContext: propagate everything, clear/keep nothing special.
@Produces
@Singleton
public ThreadContext getAllThreadContext() {
return ThreadContext.builder().propagated(ThreadContext.ALL_REMAINING).cleared().unchanged().build();
}
// Produces the container-managed executor built in initialize(...).
@Produces
@Singleton
public ManagedExecutor getAllManagedExecutor() {
return managedExecutor;
}
} |
@tsreaper - I've changed the parallelism to 1 and now the output is in the same order as you had mentioned. I've also added table output recommended by @JingsongLi. Please let me know if you have any other recommendations. Thanks | public static void main(String[] args) throws Exception {
// Flink streaming SQL example: tumbling-window aggregation over a small CSV file.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
// Sample orders: user_id, product, amount, event timestamp.
String contents =
"1,beer,3,2019-12-12 00:00:01\n"
+ "1,diaper,4,2019-12-12 00:00:02\n"
+ "2,pen,3,2019-12-12 00:00:04\n"
+ "2,rubber,3,2019-12-12 00:00:06\n"
+ "3,rubber,2,2019-12-12 00:00:05\n"
+ "4,beer,1,2019-12-12 00:00:08";
String path = createTempFile(contents);
// Register the file as a table with a 3-second-delay watermark on ts.
String ddl =
"CREATE TABLE orders (\n"
+ " user_id INT,\n"
+ " product STRING,\n"
+ " amount INT,\n"
+ " ts TIMESTAMP(3),\n"
+ " WATERMARK FOR ts AS ts - INTERVAL '3' SECOND\n"
+ ") WITH (\n"
+ " 'connector.type' = 'filesystem',\n"
+ " 'connector.path' = '"
+ path
+ "',\n"
+ " 'format.type' = 'csv'\n"
+ ")";
tEnv.executeSql(ddl);
// Aggregate per 5-second tumbling event-time window.
String query =
"SELECT\n"
+ " CAST(TUMBLE_START(ts, INTERVAL '5' SECOND) AS STRING) window_start,\n"
+ " COUNT(*) order_num,\n"
+ " SUM(amount) total_amount,\n"
+ " COUNT(DISTINCT product) unique_products\n"
+ "FROM orders\n"
+ "GROUP BY TUMBLE(ts, INTERVAL '5' SECOND)";
// Print results through the DataStream API and trigger execution explicitly.
Table result = tEnv.sqlQuery(query);
tEnv.toAppendStream(result, Row.class).print();
env.execute("Streaming Window SQL Job");
} | public static void main(String[] args) throws Exception {
// Flink streaming SQL example: tumbling-window aggregation over a small CSV file.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
// Sample orders: user_id, product, amount, event timestamp.
String contents =
"1,beer,3,2019-12-12 00:00:01\n"
+ "1,diaper,4,2019-12-12 00:00:02\n"
+ "2,pen,3,2019-12-12 00:00:04\n"
+ "2,rubber,3,2019-12-12 00:00:06\n"
+ "3,rubber,2,2019-12-12 00:00:05\n"
+ "4,beer,1,2019-12-12 00:00:08";
String path = createTempFile(contents);
// Register the file as a table with a 3-second-delay watermark on ts.
String ddl =
"CREATE TABLE orders (\n"
+ " user_id INT,\n"
+ " product STRING,\n"
+ " amount INT,\n"
+ " ts TIMESTAMP(3),\n"
+ " WATERMARK FOR ts AS ts - INTERVAL '3' SECOND\n"
+ ") WITH (\n"
+ " 'connector.type' = 'filesystem',\n"
+ " 'connector.path' = '"
+ path
+ "',\n"
+ " 'format.type' = 'csv'\n"
+ ")";
tEnv.executeSql(ddl);
// Aggregate per 5-second tumbling event-time window.
String query =
"SELECT\n"
+ " CAST(TUMBLE_START(ts, INTERVAL '5' SECOND) AS STRING) window_start,\n"
+ " COUNT(*) order_num,\n"
+ " SUM(amount) total_amount,\n"
+ " COUNT(DISTINCT product) unique_products\n"
+ "FROM orders\n"
+ "GROUP BY TUMBLE(ts, INTERVAL '5' SECOND)";
// executeSql submits the job and print() pulls and renders the result table.
tEnv.executeSql(query).print();
} | class StreamWindowSQLExample {
/** Creates a temporary file with the contents and returns the absolute path. */
private static String createTempFile(String contents) throws IOException {
    // Write the CSV payload to a throwaway file that is cleaned up on JVM exit.
    File csvFile = File.createTempFile("orders", ".csv");
    csvFile.deleteOnExit();
    FileUtils.writeFileUtf8(csvFile, contents);
    String uri = csvFile.toURI().toString();
    return uri;
}
} | class StreamWindowSQLExample {
/** Creates a temporary file with the contents and returns the absolute path. */
private static String createTempFile(String contents) throws IOException {
File tempFile = File.createTempFile("orders", ".csv");
// Temporary fixture only: remove it when the JVM exits.
tempFile.deleteOnExit();
FileUtils.writeFileUtf8(tempFile, contents);
// Returned as a URI string so it can be embedded directly in the connector DDL.
return tempFile.toURI().toString();
}
} |
|
> only private ctor is enough, why add the exception? As suggestion by Sonar, this makes it more readable, and I accept it. | private SerializerUtils(){
throw new IllegalStateException("Utility class");
} | throw new IllegalStateException("Utility class"); | private SerializerUtils() {
} | class SerializerUtils {
// Shared, statically configured Jackson mapper; ObjectMapper is thread-safe once configured.
private static final ObjectMapper OBJECT_MAPPER;
// Target type for deserializing the client-registration-id -> authorized-client map.
private static final TypeReference<Map<String, OAuth2AuthorizedClient>> TYPE_REFERENCE =
new TypeReference<Map<String, OAuth2AuthorizedClient>>() {
};
static {
OBJECT_MAPPER = new ObjectMapper();
// Modules needed to (de)serialize Spring Security OAuth2 and java.time types.
OBJECT_MAPPER.registerModule(new OAuth2ClientJackson2Module());
OBJECT_MAPPER.registerModule(new AadOAuth2ClientJackson2Module());
OBJECT_MAPPER.registerModule(new CoreJackson2Module());
OBJECT_MAPPER.registerModule(new JavaTimeModule());
}
/**
 * Serializes the map of client-registration id to authorized client as a JSON string.
 *
 * @param authorizedClients authorized clients keyed by client registration id
 * @return the JSON representation
 * @throws IllegalStateException if Jackson fails to serialize the map
 */
public static String serializeOAuth2AuthorizedClientMap(Map<String, OAuth2AuthorizedClient> authorizedClients) {
    try {
        return OBJECT_MAPPER.writeValueAsString(authorizedClients);
    } catch (JsonProcessingException e) {
        throw new IllegalStateException(e);
    }
}
/**
* Deserializes a JSON string back into the client-registration-id -> authorized-client map.
* A null input yields an empty, mutable map rather than null.
*
* @throws IllegalStateException if Jackson fails to parse the input
*/
public static Map<String, OAuth2AuthorizedClient> deserializeOAuth2AuthorizedClientMap(String authorizedClientsString) {
if (authorizedClientsString == null) {
return new HashMap<>();
}
Map<String, OAuth2AuthorizedClient> authorizedClients;
try {
authorizedClients = OBJECT_MAPPER.readValue(authorizedClientsString, TYPE_REFERENCE);
} catch (JsonProcessingException e) {
throw new IllegalStateException(e);
}
return authorizedClients;
}
} | class SerializerUtils {
// Shared, statically configured Jackson mapper; ObjectMapper is thread-safe once configured.
private static final ObjectMapper OBJECT_MAPPER;
// Target type for deserializing the client-registration-id -> authorized-client map.
private static final TypeReference<Map<String, OAuth2AuthorizedClient>> TYPE_REFERENCE =
new TypeReference<Map<String, OAuth2AuthorizedClient>>() {
};
static {
OBJECT_MAPPER = new ObjectMapper();
// Modules needed to (de)serialize Spring Security OAuth2 and java.time types.
OBJECT_MAPPER.registerModule(new OAuth2ClientJackson2Module());
OBJECT_MAPPER.registerModule(new AadOAuth2ClientJackson2Module());
OBJECT_MAPPER.registerModule(new CoreJackson2Module());
OBJECT_MAPPER.registerModule(new JavaTimeModule());
}
// Serializes the authorized-client map as JSON; wraps Jackson failures as IllegalStateException.
public static String serializeOAuth2AuthorizedClientMap(Map<String, OAuth2AuthorizedClient> authorizedClients) {
String result;
try {
result = OBJECT_MAPPER.writeValueAsString(authorizedClients);
} catch (JsonProcessingException e) {
throw new IllegalStateException(e);
}
return result;
}
// Parses the JSON back into a map; null input yields an empty, mutable map rather than null.
public static Map<String, OAuth2AuthorizedClient> deserializeOAuth2AuthorizedClientMap(String authorizedClientsString) {
if (authorizedClientsString == null) {
return new HashMap<>();
}
Map<String, OAuth2AuthorizedClient> authorizedClients;
try {
authorizedClients = OBJECT_MAPPER.readValue(authorizedClientsString, TYPE_REFERENCE);
} catch (JsonProcessingException e) {
throw new IllegalStateException(e);
}
return authorizedClients;
}
} |
Is it related with this PR? | private void handleShowRoutineLoad() throws AnalysisException {
// SHOW ROUTINE LOAD: fetch matching jobs, filter out jobs the user may not see, then apply
// the statement's WHERE/ORDER BY/LIMIT expressions before building the result rows.
ShowRoutineLoadStmt showRoutineLoadStmt = (ShowRoutineLoadStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
List<RoutineLoadJob> routineLoadJobList;
try {
routineLoadJobList = GlobalStateMgr.getCurrentState().getRoutineLoadManager()
.getJob(showRoutineLoadStmt.getDbFullName(),
showRoutineLoadStmt.getName(),
showRoutineLoadStmt.isIncludeHistory());
} catch (MetaNotFoundException e) {
LOG.warn(e.getMessage(), e);
throw new AnalysisException(e.getMessage());
}
if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
// Drop jobs whose target table the user has no privilege on.
Iterator<RoutineLoadJob> iterator = routineLoadJobList.iterator();
while (iterator.hasNext()) {
RoutineLoadJob routineLoadJob = iterator.next();
try {
if (!PrivilegeManager.checkAnyActionOnTable(connectContext,
routineLoadJob.getDbFullName(),
routineLoadJob.getTableName())) {
iterator.remove();
}
// NOTE(review): MetaNotFoundException is silently swallowed — a job whose table can no
// longer be resolved is kept in the list; confirm this best-effort behavior is intended.
} catch (MetaNotFoundException e) {
}
}
}
if (routineLoadJobList != null) {
RoutineLoadFunctionalExprProvider fProvider = showRoutineLoadStmt.getFunctionalExprProvider(this.connectContext);
rows = routineLoadJobList.parallelStream()
.filter(fProvider.getPredicateChain())
.sorted(fProvider.getOrderComparator())
.skip(fProvider.getSkipCount())
.limit(fProvider.getLimitCount())
.map(job -> job.getShowInfo())
.collect(Collectors.toList());
}
// A named lookup that matched nothing is an error, with a hint about stopped/cancelled jobs.
if (!Strings.isNullOrEmpty(showRoutineLoadStmt.getName()) && rows.isEmpty()) {
throw new AnalysisException("There is no running job named " + showRoutineLoadStmt.getName()
+ " in db " + showRoutineLoadStmt.getDbFullName()
+ ". Include history? " + showRoutineLoadStmt.isIncludeHistory()
+ ", you can try `show all routine load job for job_name` if you want to list stopped and cancelled jobs");
}
resultSet = new ShowResultSet(showRoutineLoadStmt.getMetaData(), rows);
} | throw new AnalysisException("There is no running job named " + showRoutineLoadStmt.getName() | private void handleShowRoutineLoad() throws AnalysisException {
// SHOW ROUTINE LOAD: fetch matching jobs, filter out jobs the user may not see, then apply
// the statement's WHERE/ORDER BY/LIMIT expressions before building the result rows.
ShowRoutineLoadStmt showRoutineLoadStmt = (ShowRoutineLoadStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
List<RoutineLoadJob> routineLoadJobList;
try {
routineLoadJobList = GlobalStateMgr.getCurrentState().getRoutineLoadManager()
.getJob(showRoutineLoadStmt.getDbFullName(),
showRoutineLoadStmt.getName(),
showRoutineLoadStmt.isIncludeHistory());
} catch (MetaNotFoundException e) {
LOG.warn(e.getMessage(), e);
throw new AnalysisException(e.getMessage());
}
if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
// Drop jobs whose target table the user has no privilege on.
Iterator<RoutineLoadJob> iterator = routineLoadJobList.iterator();
while (iterator.hasNext()) {
RoutineLoadJob routineLoadJob = iterator.next();
try {
if (!PrivilegeManager.checkAnyActionOnTable(connectContext,
routineLoadJob.getDbFullName(),
routineLoadJob.getTableName())) {
iterator.remove();
}
// NOTE(review): MetaNotFoundException is silently swallowed — a job whose table can no
// longer be resolved is kept in the list; confirm this best-effort behavior is intended.
} catch (MetaNotFoundException e) {
}
}
}
if (routineLoadJobList != null) {
RoutineLoadFunctionalExprProvider fProvider = showRoutineLoadStmt.getFunctionalExprProvider(this.connectContext);
rows = routineLoadJobList.parallelStream()
.filter(fProvider.getPredicateChain())
.sorted(fProvider.getOrderComparator())
.skip(fProvider.getSkipCount())
.limit(fProvider.getLimitCount())
.map(job -> job.getShowInfo())
.collect(Collectors.toList());
}
// A named lookup that matched nothing is an error, with a hint about stopped/cancelled jobs.
if (!Strings.isNullOrEmpty(showRoutineLoadStmt.getName()) && rows.isEmpty()) {
throw new AnalysisException("There is no running job named " + showRoutineLoadStmt.getName()
+ " in db " + showRoutineLoadStmt.getDbFullName()
+ ". Include history? " + showRoutineLoadStmt.isIncludeHistory()
+ ", you can try `show all routine load job for job_name` if you want to list stopped and cancelled jobs");
}
resultSet = new ShowResultSet(showRoutineLoadStmt.getMetaData(), rows);
} | class ShowExecutor {
private static final Logger LOG = LogManager.getLogger(ShowExecutor.class);
// Shared empty row list used by handlers that produce no rows.
private static final List<List<String>> EMPTY_SET = Lists.newArrayList();
// Session context of the client issuing the SHOW statement.
private final ConnectContext connectContext;
// The analyzed SHOW statement this executor dispatches on.
private final ShowStmt stmt;
// Filled in by the handleXxx method chosen in execute().
private ShowResultSet resultSet;
private final MetadataMgr metadataMgr;
// Builds an executor bound to one session and one analyzed SHOW statement.
public ShowExecutor(ConnectContext connectContext, ShowStmt stmt) {
this.connectContext = connectContext;
this.stmt = stmt;
resultSet = null;
metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();
}
/**
* Dispatches the analyzed SHOW statement to its handleXxx method (which populates resultSet),
* then applies the statement's generic predicate filtering and returns the final result set.
* Unrecognized statement types fall through to handleEmtpy() and yield an empty result.
*/
public ShowResultSet execute() throws AnalysisException, DdlException {
if (stmt instanceof ShowMaterializedViewStmt) {
handleShowMaterializedView();
} else if (stmt instanceof ShowAuthorStmt) {
handleShowAuthor();
} else if (stmt instanceof ShowProcStmt) {
handleShowProc();
} else if (stmt instanceof HelpStmt) {
handleHelp();
} else if (stmt instanceof ShowWarehousesStmt) {
handleShowWarehouses();
} else if (stmt instanceof ShowClustersStmt) {
handleShowClusters();
} else if (stmt instanceof ShowDbStmt) {
handleShowDb();
} else if (stmt instanceof ShowTableStmt) {
handleShowTable();
} else if (stmt instanceof ShowTableStatusStmt) {
handleShowTableStatus();
} else if (stmt instanceof DescribeStmt) {
handleDescribe();
} else if (stmt instanceof ShowCreateTableStmt) {
handleShowCreateTable();
} else if (stmt instanceof ShowCreateDbStmt) {
handleShowCreateDb();
} else if (stmt instanceof ShowProcesslistStmt) {
handleShowProcesslist();
} else if (stmt instanceof ShowEnginesStmt) {
handleShowEngines();
} else if (stmt instanceof ShowFunctionsStmt) {
handleShowFunctions();
} else if (stmt instanceof ShowVariablesStmt) {
handleShowVariables();
} else if (stmt instanceof ShowColumnStmt) {
handleShowColumn();
} else if (stmt instanceof ShowLoadStmt) {
handleShowLoad();
} else if (stmt instanceof ShowRoutineLoadStmt) {
handleShowRoutineLoad();
} else if (stmt instanceof ShowRoutineLoadTaskStmt) {
handleShowRoutineLoadTask();
} else if (stmt instanceof ShowStreamLoadStmt) {
handleShowStreamLoad();
} else if (stmt instanceof ShowDeleteStmt) {
handleShowDelete();
} else if (stmt instanceof ShowAlterStmt) {
handleShowAlter();
} else if (stmt instanceof ShowUserPropertyStmt) {
handleShowUserProperty();
} else if (stmt instanceof ShowDataStmt) {
handleShowData();
} else if (stmt instanceof ShowCollationStmt) {
handleShowCollation();
} else if (stmt instanceof ShowPartitionsStmt) {
handleShowPartitions();
} else if (stmt instanceof ShowTabletStmt) {
handleShowTablet();
} else if (stmt instanceof ShowBackupStmt) {
handleShowBackup();
} else if (stmt instanceof ShowRestoreStmt) {
handleShowRestore();
} else if (stmt instanceof ShowBrokerStmt) {
handleShowBroker();
} else if (stmt instanceof ShowResourcesStmt) {
handleShowResources();
} else if (stmt instanceof ShowExportStmt) {
handleShowExport();
} else if (stmt instanceof ShowBackendsStmt) {
handleShowBackends();
} else if (stmt instanceof ShowFrontendsStmt) {
handleShowFrontends();
} else if (stmt instanceof ShowRepositoriesStmt) {
handleShowRepositories();
} else if (stmt instanceof ShowSnapshotStmt) {
handleShowSnapshot();
} else if (stmt instanceof ShowGrantsStmt) {
handleShowGrants();
} else if (stmt instanceof ShowRolesStmt) {
handleShowRoles();
} else if (stmt instanceof AdminShowReplicaStatusStmt) {
handleAdminShowTabletStatus();
} else if (stmt instanceof AdminShowReplicaDistributionStmt) {
handleAdminShowTabletDistribution();
} else if (stmt instanceof AdminShowConfigStmt) {
handleAdminShowConfig();
} else if (stmt instanceof ShowSmallFilesStmt) {
handleShowSmallFiles();
} else if (stmt instanceof ShowDynamicPartitionStmt) {
handleShowDynamicPartition();
} else if (stmt instanceof ShowIndexStmt) {
handleShowIndex();
} else if (stmt instanceof ShowTransactionStmt) {
handleShowTransaction();
} else if (stmt instanceof ShowPluginsStmt) {
handleShowPlugins();
} else if (stmt instanceof ShowSqlBlackListStmt) {
handleShowSqlBlackListStmt();
} else if (stmt instanceof ShowAnalyzeJobStmt) {
handleShowAnalyzeJob();
} else if (stmt instanceof ShowAnalyzeStatusStmt) {
handleShowAnalyzeStatus();
} else if (stmt instanceof ShowBasicStatsMetaStmt) {
handleShowBasicStatsMeta();
} else if (stmt instanceof ShowHistogramStatsMetaStmt) {
handleShowHistogramStatsMeta();
} else if (stmt instanceof ShowResourceGroupStmt) {
handleShowResourceGroup();
} else if (stmt instanceof ShowUserStmt) {
handleShowUser();
} else if (stmt instanceof ShowCatalogsStmt) {
handleShowCatalogs();
} else if (stmt instanceof ShowComputeNodesStmt) {
handleShowComputeNodes();
} else if (stmt instanceof ShowAuthenticationStmt) {
handleShowAuthentication();
} else if (stmt instanceof ShowCreateExternalCatalogStmt) {
handleShowCreateExternalCatalog();
} else {
handleEmtpy();
}
// Post-process the handler's rows with the statement-level predicate (WHERE-style filtering).
List<List<String>> rows = doPredicate(stmt, stmt.getMetaData(), resultSet.getResultRows());
return new ShowResultSet(resultSet.getMetaData(), rows);
}
/**
 * SHOW AUTHENTICATION [ALL | FOR user]: lists authentication info (user, has-password,
 * auth plugin, plugin text) for all users, a named user, or the session user when no user
 * is named. Dispatches on whether the new privilege framework is active.
 */
private void handleShowAuthentication() {
    final ShowAuthenticationStmt showAuthenticationStmt = (ShowAuthenticationStmt) stmt;
    if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
        AuthenticationManager authenticationManager = GlobalStateMgr.getCurrentState().getAuthenticationManager();
        List<List<String>> userAuthInfos = Lists.newArrayList();
        Map<UserIdentity, UserAuthenticationInfo> authenticationInfoMap = new HashMap<>();
        if (showAuthenticationStmt.isAll()) {
            authenticationInfoMap.putAll(authenticationManager.getUserToAuthenticationInfo());
        } else {
            // Fall back to the session user when no user is named. Key the map by the identity
            // actually looked up: the original put showAuthenticationStmt.getUserIdent() (null in
            // the fallback case) as the key, which made entry.getKey().toString() below NPE.
            UserIdentity userIdentity = showAuthenticationStmt.getUserIdent() == null
                    ? connectContext.getCurrentUserIdentity()
                    : showAuthenticationStmt.getUserIdent();
            UserAuthenticationInfo userAuthenticationInfo =
                    authenticationManager.getUserAuthenticationInfoByUserIdentity(userIdentity);
            authenticationInfoMap.put(userIdentity, userAuthenticationInfo);
        }
        for (Map.Entry<UserIdentity, UserAuthenticationInfo> entry : authenticationInfoMap.entrySet()) {
            UserAuthenticationInfo userAuthenticationInfo = entry.getValue();
            userAuthInfos.add(Lists.newArrayList(
                    entry.getKey().toString(),
                    // Only report whether a password is set, never the password itself.
                    userAuthenticationInfo.getPassword().length == 0 ? "No" : "Yes",
                    userAuthenticationInfo.getAuthPlugin(),
                    userAuthenticationInfo.getTextForAuthPlugin()));
        }
        resultSet = new ShowResultSet(showAuthenticationStmt.getMetaData(), userAuthInfos);
    } else {
        List<List<String>> rows;
        if (showAuthenticationStmt.isAll()) {
            // NOTE(review): for ALL the legacy path passes getUserIdent() (likely null) through;
            // confirm the legacy Auth API treats a null identity as "all users".
            rows = GlobalStateMgr.getCurrentState().getAuth()
                    .getAuthenticationInfo(showAuthenticationStmt.getUserIdent());
        } else {
            if (showAuthenticationStmt.getUserIdent() == null) {
                rows = GlobalStateMgr.getCurrentState().getAuth()
                        .getAuthenticationInfo(connectContext.getCurrentUserIdentity());
            } else {
                rows = GlobalStateMgr.getCurrentState().getAuth()
                        .getAuthenticationInfo(showAuthenticationStmt.getUserIdent());
            }
        }
        resultSet = new ShowResultSet(showAuthenticationStmt.getMetaData(), rows);
    }
}
/** SHOW COMPUTE NODES: surfaces the cluster-wide compute node info rows as-is. */
private void handleShowComputeNodes() {
    final ShowComputeNodesStmt showComputeNodesStmt = (ShowComputeNodesStmt) stmt;
    resultSet = new ShowResultSet(showComputeNodesStmt.getMetaData(),
            ComputeNodeProcDir.getClusterComputeNodesInfos());
}
/**
* SHOW MATERIALIZED VIEW: lists both asynchronous materialized views (MaterializedView objects)
* and synchronous rollup-style MVs (non-base visible indexes of OLAP tables) in one database,
* filtered by the optional LIKE pattern and, under the new privilege framework, by privileges
* on the MV and on every local base table. Runs under the database read lock.
*/
private void handleShowMaterializedView() throws AnalysisException {
ShowMaterializedViewStmt showMaterializedViewStmt = (ShowMaterializedViewStmt) stmt;
String dbName = showMaterializedViewStmt.getDb();
List<List<String>> rowSets = Lists.newArrayList();
Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
MetaUtils.checkDbNullAndReport(db, dbName);
db.readLock();
try {
PatternMatcher matcher = null;
if (showMaterializedViewStmt.getPattern() != null) {
matcher = PatternMatcher.createMysqlPattern(showMaterializedViewStmt.getPattern(),
CaseSensibility.TABLE.getCaseSensibility());
}
// Part 1: asynchronous materialized views registered on the database.
for (MaterializedView mvTable : db.getMaterializedViews()) {
if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
// The MV is hidden unless the user can SELECT from every local base table.
AtomicBoolean baseTableHasPrivilege = new AtomicBoolean(true);
mvTable.getBaseTableInfos().forEach(baseTableInfo -> {
Table baseTable = baseTableInfo.getTable();
if (baseTable != null && baseTable.isLocalTable() && !PrivilegeManager.
checkTableAction(connectContext, baseTableInfo.getDbName(),
baseTableInfo.getTableName(),
PrivilegeType.SELECT)) {
baseTableHasPrivilege.set(false);
}
});
if (!baseTableHasPrivilege.get()) {
continue;
}
if (!PrivilegeManager.checkAnyActionOnMaterializedView(connectContext, db.getFullName(),
mvTable.getName())) {
continue;
}
}
if (matcher != null && !matcher.match(mvTable.getName())) {
continue;
}
List<String> resultRow = Lists.newArrayList(String.valueOf(mvTable.getId()), mvTable.getName(), dbName,
mvTable.getMaterializedViewDdlStmt(true), String.valueOf(mvTable.getRowCount()));
rowSets.add(resultRow);
}
// Part 2: synchronous MVs, i.e. visible non-base indexes of OLAP tables.
for (Table table : db.getTables()) {
if (table.getType() == Table.TableType.OLAP) {
OlapTable olapTable = (OlapTable) table;
List<MaterializedIndex> visibleMaterializedViews = olapTable.getVisibleIndex();
long baseIdx = olapTable.getBaseIndexId();
for (MaterializedIndex mvIdx : visibleMaterializedViews) {
// The base index is the table itself, not a materialized view.
if (baseIdx == mvIdx.getId()) {
continue;
}
if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvIdx.getId()))) {
continue;
}
ArrayList<String> resultRow = new ArrayList<>();
MaterializedIndexMeta mvMeta = olapTable.getVisibleIndexIdToMeta().get(mvIdx.getId());
resultRow.add(String.valueOf(mvIdx.getId()));
resultRow.add(olapTable.getIndexNameById(mvIdx.getId()));
resultRow.add(dbName);
if (mvMeta.getOriginStmt() == null) {
// No recorded origin statement (e.g. old metadata): reconstruct an equivalent
// CREATE MATERIALIZED VIEW from the index schema — keys become the group-by
// columns, non-keys become aggregate expressions.
StringBuilder originStmtBuilder = new StringBuilder(
"create materialized view " + olapTable.getIndexNameById(mvIdx.getId()) +
" as select ");
String groupByString = "";
for (Column column : mvMeta.getSchema()) {
if (column.isKey()) {
groupByString += column.getName() + ",";
}
}
originStmtBuilder.append(groupByString);
for (Column column : mvMeta.getSchema()) {
if (!column.isKey()) {
originStmtBuilder.append(column.getAggregationType().toString()).append("(")
.append(column.getName()).append(")").append(",");
}
}
// Drop the trailing commas left by the two append loops above.
originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
originStmtBuilder.append(" from ").append(olapTable.getName()).append(" group by ")
.append(groupByString);
originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
resultRow.add(originStmtBuilder.toString());
} else {
// Normalize the stored statement to a single line with single spaces.
resultRow.add(mvMeta.getOriginStmt().replace("\n", "").replace("\t", "")
.replaceAll("[ ]+", " "));
}
resultRow.add(String.valueOf(mvIdx.getRowCount()));
rowSets.add(resultRow);
}
}
}
} finally {
db.readUnlock();
}
resultSet = new ShowResultSet(stmt.getMetaData(), rowSets);
}
/** SHOW PROCESSLIST: one row per connection visible to the current user. */
private void handleShowProcesslist() {
    ShowProcesslistStmt showProcesslistStmt = (ShowProcesslistStmt) stmt;
    List<ConnectContext.ThreadInfo> threadInfos = connectContext.getConnectScheduler()
            .listConnection(connectContext.getQualifiedUser());
    long nowMs = System.currentTimeMillis();
    List<List<String>> rows = Lists.newArrayList();
    for (ConnectContext.ThreadInfo threadInfo : threadInfos) {
        // toRow may return null for connections that should not be displayed.
        List<String> row = threadInfo.toRow(nowMs, showProcesslistStmt.showFull());
        if (row != null) {
            rows.add(row);
        }
    }
    resultSet = new ShowResultSet(showProcesslistStmt.getMetaData(), rows);
}
// Fallback for SHOW statements with no dedicated handler: return an empty result set.
// NOTE(review): the method name has a typo ("Emtpy"); kept as-is because execute() calls it
// by this exact name.
private void handleEmtpy() {
resultSet = new ShowResultSet(stmt.getMetaData(), EMPTY_SET);
}
// SHOW AUTHORS: returns an empty row set; only the column metadata is meaningful
// (kept for MySQL client compatibility).
private void handleShowAuthor() {
ShowAuthorStmt showAuthorStmt = (ShowAuthorStmt) stmt;
List<List<String>> rowSet = Lists.newArrayList();
resultSet = new ShowResultSet(showAuthorStmt.getMetaData(), rowSet);
}
/** SHOW ENGINES: emits the fixed table of storage engines StarRocks understands. */
private void handleShowEngines() {
    ShowEnginesStmt showEnginesStmt = (ShowEnginesStmt) stmt;
    List<List<String>> engineRows = Lists.newArrayList();
    // Columns: Engine, Support, Comment, Transactions, XA, Savepoints — values are static.
    engineRows.add(Lists.newArrayList("OLAP", "YES", "Default storage engine of StarRocks", "NO", "NO", "NO"));
    engineRows.add(Lists.newArrayList("MySQL", "YES", "MySQL server which data is in it", "NO", "NO", "NO"));
    engineRows.add(Lists.newArrayList("ELASTICSEARCH", "YES", "ELASTICSEARCH cluster which data is in it", "NO", "NO", "NO"));
    engineRows.add(Lists.newArrayList("HIVE", "YES", "HIVE database which data is in it", "NO", "NO", "NO"));
    engineRows.add(Lists.newArrayList("ICEBERG", "YES", "ICEBERG data lake which data is in it", "NO", "NO", "NO"));
    resultSet = new ShowResultSet(showEnginesStmt.getMetaData(), engineRows);
}
/**
 * SHOW [BUILTIN|GLOBAL] FUNCTIONS: lists functions from the builtin set, the global function
 * manager, or the statement's database; filters by the optional LIKE pattern, sorts by name,
 * and keeps only the first row per function name (overloads share a name).
 */
private void handleShowFunctions() throws AnalysisException {
    ShowFunctionsStmt showStmt = (ShowFunctionsStmt) stmt;
    // Choose the function source; the original's null initializer was dead — every branch assigns.
    List<Function> functions;
    if (showStmt.getIsBuiltin()) {
        functions = connectContext.getGlobalStateMgr().getBuiltinFunctions();
    } else if (showStmt.getIsGlobal()) {
        functions = connectContext.getGlobalStateMgr().getGlobalFunctionMgr().getFunctions();
    } else {
        Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
        functions = db.getFunctions();
    }
    List<List<Comparable>> rowSet = Lists.newArrayList();
    for (Function function : functions) {
        List<Comparable> row = function.getInfo(showStmt.getIsVerbose());
        if (showStmt.getWild() == null || showStmt.like(function.functionName())) {
            rowSet.add(row);
        }
    }
    // Sort ascending by the first column (the function name).
    rowSet.sort(new ListComparator<>(new OrderByPair(0, false)));
    List<List<String>> resultRowSet = Lists.newArrayList();
    Set<String> functionNameSet = new HashSet<>();
    for (List<Comparable> row : rowSet) {
        // Check before building the string row — the original allocated it even for skipped rows.
        if (functionNameSet.contains(row.get(0).toString())) {
            continue;
        }
        List<String> resultRow = Lists.newArrayList();
        for (Comparable column : row) {
            resultRow.add(column.toString());
        }
        resultRowSet.add(resultRow);
        functionNameSet.add(resultRow.get(0));
    }
    // Non-verbose output exposes only the function name column.
    ShowResultSetMetaData showMetaData = showStmt.getIsVerbose() ? showStmt.getMetaData() :
            ShowResultSetMetaData.builder()
                    .addColumn(new Column("Function Name", ScalarType.createVarchar(256))).build();
    resultSet = new ShowResultSet(showMetaData, resultRowSet);
}
/** SHOW PROC '/path': delegates to the proc-dir node resolved during analysis. */
private void handleShowProc() throws AnalysisException {
    ShowProcStmt showProcStmt = (ShowProcStmt) stmt;
    ShowResultSetMetaData metaData = showProcStmt.getMetaData();
    ProcNodeInterface procNode = showProcStmt.getNode();
    resultSet = new ShowResultSet(metaData, procNode.fetchResult().getRows());
}
/**
 * SHOW DATABASES [FROM catalog]: lists database names in the requested (or current) catalog
 * that the session user is allowed to see, optionally filtered by a LIKE pattern, in sorted
 * order.
 */
private void handleShowDb() throws AnalysisException, DdlException {
    ShowDbStmt showDbStmt = (ShowDbStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    // Fall back to the session's current catalog when the statement does not name one.
    String catalogName;
    if (showDbStmt.getCatalogName() == null) {
        catalogName = connectContext.getCurrentCatalog();
    } else {
        catalogName = showDbStmt.getCatalogName();
    }
    // The original initialized dbNames to an empty ArrayList that was immediately overwritten.
    List<String> dbNames = metadataMgr.listDbNames(catalogName);
    PatternMatcher matcher = null;
    if (showDbStmt.getPattern() != null) {
        matcher = PatternMatcher.createMysqlPattern(showDbStmt.getPattern(),
                CaseSensibility.DATABASE.getCaseSensibility());
    }
    // TreeSet both de-duplicates and yields the names in sorted order.
    Set<String> dbNameSet = Sets.newTreeSet();
    for (String dbName : dbNames) {
        if (matcher != null && !matcher.match(dbName)) {
            continue;
        }
        // Privilege filtering: the new framework only guards internal-catalog databases.
        if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
            if (CatalogMgr.isInternalCatalog(catalogName) &&
                    !PrivilegeManager.checkAnyActionOnOrInDb(connectContext, dbName)) {
                continue;
            }
        } else {
            if (!PrivilegeChecker.checkDbPriv(connectContext, catalogName, dbName, PrivPredicate.SHOW)) {
                continue;
            }
        }
        dbNameSet.add(dbName);
    }
    for (String dbName : dbNameSet) {
        rows.add(Lists.newArrayList(dbName));
    }
    resultSet = new ShowResultSet(showDbStmt.getMetaData(), rows);
}
/**
* SHOW TABLES [FROM db]: lists table names (with type when VERBOSE) in one database of the
* requested or current catalog. Internal-catalog tables are privilege-filtered under the
* database read lock; external catalogs are listed via the metadata manager and reported as
* "BASE TABLE".
*/
private void handleShowTable() throws AnalysisException {
ShowTableStmt showTableStmt = (ShowTableStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
String catalogName = showTableStmt.getCatalogName();
if (catalogName == null) {
catalogName = connectContext.getCurrentCatalog();
}
String dbName = showTableStmt.getDb();
Database db = metadataMgr.getDb(catalogName, dbName);
PatternMatcher matcher = null;
if (showTableStmt.getPattern() != null) {
matcher = PatternMatcher.createMysqlPattern(showTableStmt.getPattern(),
CaseSensibility.TABLE.getCaseSensibility());
}
// TreeMap keeps the output sorted by table name.
Map<String, String> tableMap = Maps.newTreeMap();
MetaUtils.checkDbNullAndReport(db, showTableStmt.getDb());
if (CatalogMgr.isInternalCatalog(catalogName)) {
db.readLock();
try {
for (Table tbl : db.getTables()) {
if (matcher != null && !matcher.match(tbl.getName())) {
continue;
}
// Skip tables the session user has no privilege to see (old or new framework).
if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
if (!PrivilegeManager.checkAnyActionOnTable(connectContext, db.getFullName(), tbl.getName())) {
continue;
}
} else {
if (!PrivilegeChecker.checkTblPriv(ConnectContext.get(), catalogName,
db.getFullName(), tbl.getName(), PrivPredicate.SHOW)) {
continue;
}
}
tableMap.put(tbl.getName(), tbl.getMysqlType());
}
} finally {
db.readUnlock();
}
} else {
// External catalog: no privilege filtering here; every entry is typed "BASE TABLE".
List<String> tableNames = metadataMgr.listTableNames(catalogName, dbName);
if (matcher != null) {
tableNames = tableNames.stream().filter(matcher::match).collect(Collectors.toList());
}
tableNames.forEach(name -> tableMap.put(name, "BASE TABLE"));
}
for (Map.Entry<String, String> entry : tableMap.entrySet()) {
if (showTableStmt.isVerbose()) {
rows.add(Lists.newArrayList(entry.getKey(), entry.getValue()));
} else {
rows.add(Lists.newArrayList(entry.getKey()));
}
}
resultSet = new ShowResultSet(showTableStmt.getMetaData(), rows);
}
/**
 * Handles SHOW TABLE STATUS: one row per visible table of the target database
 * containing name, engine and comment, with NULL placeholders for the fifteen
 * MySQL status columns StarRocks does not track.
 */
private void handleShowTableStatus() throws AnalysisException {
    ShowTableStatusStmt showStmt = (ShowTableStatusStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
    if (db != null) {
        db.readLock();
        try {
            PatternMatcher matcher = showStmt.getPattern() == null
                    ? null
                    : PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                            CaseSensibility.TABLE.getCaseSensibility());
            for (Table table : db.getTables()) {
                String name = table.getName();
                if (matcher != null && !matcher.match(name)) {
                    continue;
                }
                // Hide tables the current user has no privilege on.
                boolean visible;
                if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
                    visible = PrivilegeManager.checkAnyActionOnTable(connectContext, db.getFullName(), name);
                } else {
                    visible = GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
                            db.getFullName(), name, PrivPredicate.SHOW);
                }
                if (!visible) {
                    continue;
                }
                List<String> row = Lists.newArrayList();
                row.add(name);
                row.add(table.getEngine());
                // 15 unsupported status columns (rows, avg_row_length, ...) stay NULL.
                for (int col = 0; col < 15; ++col) {
                    row.add(null);
                }
                row.add(table.getComment());
                rows.add(row);
            }
        } finally {
            db.readUnlock();
        }
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/** Handles SHOW [GLOBAL|SESSION] VARIABLES [LIKE pattern] via VariableMgr.dump(). */
private void handleShowVariables() throws AnalysisException {
    ShowVariablesStmt showStmt = (ShowVariablesStmt) stmt;
    PatternMatcher matcher = showStmt.getPattern() == null
            ? null
            : PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                    CaseSensibility.VARIABLES.getCaseSensibility());
    List<List<String>> rows = VariableMgr.dump(showStmt.getType(), connectContext.getSessionVariable(), matcher);
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/** Handles SHOW CREATE DATABASE: emits a synthesized CREATE DATABASE statement. */
private void handleShowCreateDb() throws AnalysisException {
    ShowCreateDbStmt showStmt = (ShowCreateDbStmt) stmt;
    String dbName = showStmt.getDb();
    Database db = connectContext.getGlobalStateMgr().getDb(dbName);
    MetaUtils.checkDbNullAndReport(db, dbName);
    List<List<String>> rows = Lists.newArrayList();
    rows.add(Lists.newArrayList(dbName, "CREATE DATABASE `" + dbName + "`"));
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/**
 * Handles SHOW CREATE TABLE, dispatching to the internal-catalog or
 * external-catalog implementation depending on the table's catalog.
 */
private void handleShowCreateTable() throws AnalysisException {
    ShowCreateTableStmt showStmt = (ShowCreateTableStmt) stmt;
    TableName tableName = showStmt.getTbl();
    String catalog = tableName.getCatalog() == null
            ? connectContext.getCurrentCatalog()
            : tableName.getCatalog();
    if (CatalogMgr.isInternalCatalog(catalog)) {
        showCreateInternalCatalogTable(showStmt);
    } else {
        showCreateExternalCatalogTable(tableName, catalog);
    }
}
/**
 * Synthesizes a CREATE TABLE statement for a table in an external catalog
 * (Hive/Hudi/Iceberg/Delta Lake/JDBC). The DDL is approximate: columns are
 * rendered with MySQL-ish types, plus a partitioned_by clause and storage
 * location when available.
 */
private void showCreateExternalCatalogTable(TableName tbl, String catalogName) {
    String dbName = tbl.getDb();
    String tableName = tbl.getTbl();
    MetadataMgr metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();
    Database db = metadataMgr.getDb(catalogName, dbName);
    if (db == null) {
        ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);
    }
    Table table = metadataMgr.getTable(catalogName, dbName, tableName);
    if (table == null) {
        ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);
    }

    StringBuilder ddl = new StringBuilder();
    ddl.append("CREATE TABLE ").append("`").append(tableName).append("`").append(" (\n");
    ddl.append(table.getFullSchema().stream()
            .map(this::toMysqlDDL)
            .collect(Collectors.joining(",\n")));
    ddl.append("\n)");

    // JDBC tables carry no partition clause; others render a Trino-style partitioned_by.
    if (table.getType() != JDBC && !table.isUnPartitioned()) {
        ddl.append("\nWITH (\n partitioned_by = ARRAY [ ");
        ddl.append(String.join(", ", table.getPartitionColumnNames())).append(" ]\n)");
    }

    String location = null;
    if (table.isHiveTable() || table.isHudiTable()) {
        location = ((HiveMetaStoreTable) table).getTableLocation();
    } else if (table.isIcebergTable()) {
        location = ((IcebergTable) table).getTableLocation();
    } else if (table.isDeltalakeTable()) {
        location = ((DeltaLakeTable) table).getTableLocation();
    }
    if (!Strings.isNullOrEmpty(location)) {
        ddl.append("\nLOCATION ").append("'").append(location).append("'");
    }

    List<List<String>> rows = Lists.newArrayList();
    rows.add(Lists.newArrayList(tableName, ddl.toString()));
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
/**
 * Renders one column as a MySQL-style column definition fragment, e.g.
 * "  `c1` int(11) DEFAULT NULL". Types with no direct MySQL equivalent fall
 * back to binary(1048576); DATE and CHAR are widened to datetime / varchar.
 */
private String toMysqlDDL(Column column) {
    StringBuilder sb = new StringBuilder();
    sb.append("  `").append(column.getName()).append("` ");
    switch (column.getType().getPrimitiveType()) {
        case TINYINT:
            sb.append("tinyint(4)");
            break;
        case SMALLINT:
            sb.append("smallint(6)");
            break;
        case INT:
            sb.append("int(11)");
            break;
        case BIGINT:
            sb.append("bigint(20)");
            break;
        case FLOAT:
            sb.append("float");
            break;
        case DOUBLE:
            sb.append("double");
            // BUGFIX: this case previously fell through into the decimal
            // cases, producing the malformed type "doubledecimal".
            break;
        case DECIMAL32:
        case DECIMAL64:
        case DECIMAL128:
        case DECIMALV2:
            sb.append("decimal");
            break;
        case DATE:
        case DATETIME:
            sb.append("datetime");
            break;
        case CHAR:
        case VARCHAR:
            sb.append("varchar(1048576)");
            break;
        default:
            sb.append("binary(1048576)");
    }
    sb.append(" DEFAULT NULL");
    return sb.toString();
}
/**
 * Handles SHOW CREATE TABLE/VIEW/MATERIALIZED VIEW for internal-catalog
 * objects: validates that the requested object kind matches the actual table
 * type, then returns the DDL produced by GlobalStateMgr.getDdlStmt().
 * (Removed dead code: an unused local View cast and an unused StringBuilder
 * that rebuilt a CREATE VIEW string never added to the result.)
 */
private void showCreateInternalCatalogTable(ShowCreateTableStmt showStmt) throws AnalysisException {
    Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
    List<List<String>> rows = Lists.newArrayList();
    db.readLock();
    try {
        Table table = db.getTable(showStmt.getTable());
        if (table == null) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());
        }
        List<String> createTableStmt = Lists.newArrayList();
        GlobalStateMgr.getDdlStmt(table, createTableStmt, null, null, false, true /* hide password */);
        if (createTableStmt.isEmpty()) {
            // No DDL could be produced; return an empty result set.
            resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
            return;
        }
        if (table instanceof View) {
            if (showStmt.getType() == ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
                        showStmt.getTable(), "MATERIALIZED VIEW");
            }
            // View result sets carry extra charset/collation columns.
            rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0), "utf8", "utf8_general_ci"));
            resultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);
        } else if (table instanceof MaterializedView) {
            if (showStmt.getType() == ShowCreateTableStmt.CreateTableType.VIEW) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
                        showStmt.getTable(), "VIEW");
            }
            rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));
            resultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);
        } else {
            if (showStmt.getType() != ShowCreateTableStmt.CreateTableType.TABLE) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
                        showStmt.getTable(), showStmt.getType().getValue());
            }
            rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));
            resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
        }
    } finally {
        db.readUnlock();
    }
}
/** Handles DESCRIBE: the result rows were precomputed during analysis, so just wrap them. */
private void handleDescribe() throws AnalysisException {
    DescribeStmt describe = (DescribeStmt) stmt;
    resultSet = new ShowResultSet(describe.getMetaData(), describe.getResultRows());
}
/**
 * Handles SHOW [FULL] COLUMNS: emits one row per base-schema column of the
 * target table, optionally filtered by a LIKE pattern on the column name.
 */
private void handleShowColumn() throws AnalysisException {
    ShowColumnStmt showStmt = (ShowColumnStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
    db.readLock();
    try {
        Table table = db.getTable(showStmt.getTable());
        if (table == null) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,
                    showStmt.getDb() + "." + showStmt.getTable());
        }
        PatternMatcher matcher = showStmt.getPattern() == null
                ? null
                : PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                        CaseSensibility.COLUMN.getCaseSensibility());
        for (Column column : table.getBaseSchema()) {
            String name = column.getName();
            if (matcher != null && !matcher.match(name)) {
                continue;
            }
            String type = column.getType().canonicalName().toLowerCase();
            String nullable = column.isAllowNull() ? "YES" : "NO";
            String key = column.isKey() ? "YES" : "NO";
            String defaultValue = column.getMetaDefaultValue(Lists.newArrayList());
            String aggType = column.getAggregationType() == null || column.isAggregationTypeImplicit()
                    ? "" : column.getAggregationType().toSql();
            if (showStmt.isVerbose()) {
                // Verbose output adds blank collation/privileges columns plus the comment.
                rows.add(Lists.newArrayList(name, type, "", nullable, key, defaultValue, aggType, "",
                        column.getComment()));
            } else {
                rows.add(Lists.newArrayList(name, type, nullable, key, defaultValue, aggType));
            }
        }
    } finally {
        db.readUnlock();
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/**
 * Handles SHOW INDEX: lists the secondary indexes of an OLAP table. Non-OLAP
 * tables simply yield an empty result set. (Removed a dead empty else branch.)
 */
private void handleShowIndex() throws AnalysisException {
    ShowIndexStmt showStmt = (ShowIndexStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
    db.readLock();
    try {
        Table table = db.getTable(showStmt.getTableName().getTbl());
        if (table == null) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,
                    db.getOriginName() + "." + showStmt.getTableName().toString());
        } else if (table instanceof OlapTable) {
            for (Index index : ((OlapTable) table).getIndexes()) {
                // Unsupported MySQL columns (non_unique, cardinality, ...) are left blank.
                rows.add(Lists.newArrayList(showStmt.getTableName().toString(), "", index.getIndexName(),
                        "", String.join(",", index.getColumns()), "", "", "", "",
                        "", index.getIndexType().name(), index.getComment()));
            }
        }
        // Other table types (views, external tables) have no indexes to report.
    } finally {
        db.readUnlock();
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/**
 * Handles HELP: resolves the argument first as an exact topic name, then as a
 * keyword, and finally as a category name, returning whichever result set
 * matches first.
 */
private void handleHelp() {
    HelpStmt helpStmt = (HelpStmt) stmt;
    String keyword = helpStmt.getMask();
    HelpModule module = HelpModule.getInstance();

    HelpTopic topic = module.getTopic(keyword);
    if (topic == null) {
        List<String> topicNames = module.listTopicByKeyword(keyword);
        if (topicNames.size() == 1) {
            topic = module.getTopic(topicNames.get(0));
        } else if (topicNames.size() > 1) {
            // Ambiguous keyword: list every matching topic (N) and category (Y).
            List<List<String>> rows = Lists.newArrayList();
            for (String name : topicNames) {
                rows.add(Lists.newArrayList(name, "N"));
            }
            for (String name : module.listCategoryByName(keyword)) {
                rows.add(Lists.newArrayList(name, "Y"));
            }
            resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);
            return;
        }
    }

    if (topic != null) {
        resultSet = new ShowResultSet(helpStmt.getMetaData(), Lists.<List<String>>newArrayList(
                Lists.newArrayList(topic.getName(), topic.getDescription(), topic.getExample())));
        return;
    }

    List<String> categories = module.listCategoryByName(keyword);
    if (categories.isEmpty()) {
        resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), EMPTY_SET);
    } else if (categories.size() > 1) {
        resultSet = new ShowResultSet(helpStmt.getCategoryMetaData(),
                Lists.<List<String>>newArrayList(categories));
    } else {
        // Exactly one category: list its topics (N) and sub-categories (Y).
        List<List<String>> rows = Lists.newArrayList();
        for (String name : module.listTopicByCategory(categories.get(0))) {
            rows.add(Lists.newArrayList(name, "N"));
        }
        for (String name : module.listCategoryByCategory(categories.get(0))) {
            rows.add(Lists.newArrayList(name, "Y"));
        }
        resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);
    }
}
/**
 * Handles SHOW LOAD: fetches load-job info rows for the database, sorts them
 * by the requested ORDER BY (or the first column by default), stringifies each
 * cell, and applies the LIMIT/OFFSET window.
 */
private void handleShowLoad() throws AnalysisException {
    ShowLoadStmt showStmt = (ShowLoadStmt) stmt;
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    Database db = globalStateMgr.getDb(showStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());

    Set<String> states = showStmt.getStates() == null
            ? null
            : showStmt.getStates().stream().map(s -> s.name()).collect(Collectors.toSet());
    List<List<Comparable>> loadInfos = globalStateMgr.getLoadManager().getLoadJobInfosByDb(
            db.getId(), showStmt.getLabelValue(), showStmt.isAccurateMatch(), states);

    ListComparator<List<Comparable>> comparator;
    List<OrderByPair> orderByPairs = showStmt.getOrderByPairs();
    if (orderByPairs == null) {
        comparator = new ListComparator<List<Comparable>>(0);
    } else {
        comparator = new ListComparator<List<Comparable>>(orderByPairs.toArray(new OrderByPair[0]));
    }
    loadInfos.sort(comparator);

    List<List<String>> rows = Lists.newArrayList();
    for (List<Comparable> info : loadInfos) {
        List<String> row = new ArrayList<String>(info.size());
        for (Comparable cell : info) {
            row.add(cell.toString());
        }
        rows.add(row);
    }

    // Windowing: an in-range offset with no limit leaves the rows untouched
    // (matching the original behavior).
    long limit = showStmt.getLimit();
    long offset = showStmt.getOffset() == -1L ? 0 : showStmt.getOffset();
    if (offset >= rows.size()) {
        rows = Lists.newArrayList();
    } else if (limit != -1L) {
        int end = (limit + offset) < rows.size() ? (int) (limit + offset) : rows.size();
        rows = rows.subList((int) offset, end);
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/**
 * Handles SHOW ROUTINE LOAD TASK: lists the task states of one routine load
 * job after checking LOAD privilege on the job's target table. Under the new
 * privilege model a denied user silently gets an empty result set; under the
 * old model an access-denied error is raised.
 */
private void handleShowRoutineLoadTask() throws AnalysisException {
    ShowRoutineLoadTaskStmt showRoutineLoadTaskStmt = (ShowRoutineLoadTaskStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    RoutineLoadJob routineLoadJob;
    try {
        routineLoadJob =
                GlobalStateMgr.getCurrentState().getRoutineLoadManager()
                        .getJob(showRoutineLoadTaskStmt.getDbFullName(),
                                showRoutineLoadTaskStmt.getJobName());
    } catch (MetaNotFoundException e) {
        LOG.warn(e.getMessage(), e);
        throw new AnalysisException(e.getMessage());
    }
    if (routineLoadJob == null) {
        // BUGFIX: the original message was missing the space before "does"
        // ("...named Xdoes not exists...") and used "exists" ungrammatically.
        throw new AnalysisException("The job named " + showRoutineLoadTaskStmt.getJobName()
                + " does not exist or job state is stopped or cancelled");
    }
    String dbFullName = showRoutineLoadTaskStmt.getDbFullName();
    String tableName;
    try {
        tableName = routineLoadJob.getTableName();
    } catch (MetaNotFoundException e) {
        throw new AnalysisException(
                "The table metadata of job has been changed. The job will be cancelled automatically", e);
    }
    if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
        if (!PrivilegeManager.checkAnyActionOnTable(connectContext, dbFullName, tableName)) {
            resultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);
            return;
        }
    } else {
        if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
                dbFullName,
                tableName,
                PrivPredicate.LOAD)) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "LOAD",
                    ConnectContext.get().getQualifiedUser(),
                    ConnectContext.get().getRemoteIP(),
                    tableName);
        }
    }
    rows.addAll(routineLoadJob.getTasksShowInfo());
    resultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);
}
/**
 * Handles SHOW STREAM LOAD: filters, orders and windows the stream-load tasks
 * through the statement's functional expression provider, and errors out when
 * a specific label was requested but matched nothing.
 */
private void handleShowStreamLoad() throws AnalysisException {
    ShowStreamLoadStmt showStmt = (ShowStreamLoadStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    List<StreamLoadTask> tasks;
    try {
        tasks = GlobalStateMgr.getCurrentState().getStreamLoadManager()
                .getTask(showStmt.getDbFullName(), showStmt.getName(), showStmt.isIncludeHistory());
    } catch (MetaNotFoundException e) {
        LOG.warn(e.getMessage(), e);
        throw new AnalysisException(e.getMessage());
    }
    if (tasks != null) {
        StreamLoadFunctionalExprProvider provider = showStmt.getFunctionalExprProvider(this.connectContext);
        rows = tasks.parallelStream()
                .filter(provider.getPredicateChain())
                .sorted(provider.getOrderComparator())
                .skip(provider.getSkipCount())
                .limit(provider.getLimitCount())
                .map(StreamLoadTask::getShowInfo)
                .collect(Collectors.toList());
    }
    if (!Strings.isNullOrEmpty(showStmt.getName()) && rows.isEmpty()) {
        throw new AnalysisException("There is no label named " + showStmt.getName()
                + " in db " + showStmt.getDbFullName()
                + ". Include history? " + showStmt.isIncludeHistory());
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/** Handles SHOW PROPERTY: row production is delegated to the statement itself. */
private void handleShowUserProperty() throws AnalysisException {
    ShowUserPropertyStmt propertyStmt = (ShowUserPropertyStmt) stmt;
    resultSet = new ShowResultSet(propertyStmt.getMetaData(), propertyStmt.getRows(connectContext));
}
/**
 * Handles SHOW DELETE: stringifies the delete-job info rows of the database.
 * (Removed an unused local that fetched the Load instance but never used it.)
 */
private void handleShowDelete() throws AnalysisException {
    ShowDeleteStmt showStmt = (ShowDeleteStmt) stmt;
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    Database db = globalStateMgr.getDb(showStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
    DeleteHandler deleteHandler = globalStateMgr.getDeleteHandler();
    List<List<Comparable>> deleteInfos = deleteHandler.getDeleteInfosByDb(db.getId());
    List<List<String>> rows = Lists.newArrayList();
    for (List<Comparable> deleteInfo : deleteInfos) {
        List<String> row = new ArrayList<String>(deleteInfo.size());
        for (Comparable cell : deleteInfo) {
            row.add(cell.toString());
        }
        rows.add(row);
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/**
 * Handles SHOW ALTER TABLE: fetches result rows from the statement's proc
 * node, using the filter/order/limit-aware path for schema-change nodes.
 */
private void handleShowAlter() throws AnalysisException {
    ShowAlterStmt showStmt = (ShowAlterStmt) stmt;
    ProcNodeInterface node = showStmt.getNode();
    Preconditions.checkNotNull(node);
    List<List<String>> rows = node instanceof SchemaChangeProcDir
            ? ((SchemaChangeProcDir) node).fetchResultByFilter(showStmt.getFilterMap(),
                    showStmt.getOrderPairs(), showStmt.getLimitElement()).getRows()
            : node.fetchResult().getRows();
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/**
 * Handles SHOW COLLATION: returns a fixed table of the three collations
 * StarRocks reports (utf8_general_ci, binary, gbk_chinese_ci).
 */
private void handleShowCollation() {
    ShowCollationStmt showStmt = (ShowCollationStmt) stmt;
    // Columns: Collation, Charset, Id, Default, Compiled, Sortlen.
    String[][] collations = {
            {"utf8_general_ci", "utf8", "33", "Yes", "Yes", "1"},
            {"binary", "binary", "63", "Yes", "Yes", "1"},
            {"gbk_chinese_ci", "gbk", "28", "Yes", "Yes", "1"},
    };
    List<List<String>> rows = Lists.newArrayList();
    for (String[] collation : collations) {
        rows.add(Lists.newArrayList(collation));
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/**
 * Handles SHOW DATA [FROM tbl].
 * Without a table: lists data size and replica count for every visible native
 * table in the database, followed by Total / Quota / Left summary rows.
 * With a table: lists per-materialized-index size, replica count and row
 * count, followed by a Total row.
 * Rows are accumulated into the statement's own result-row list, which is
 * also what gets wrapped into the final result set.
 */
private void handleShowData() {
    ShowDataStmt showStmt = (ShowDataStmt) stmt;
    String dbName = showStmt.getDbName();
    Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
    if (db == null) {
        ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);
    }
    db.readLock();
    try {
        String tableName = showStmt.getTableName();
        // The statement owns the row list; we append to it in place.
        List<List<String>> totalRows = showStmt.getResultRows();
        if (tableName == null) {
            long totalSize = 0;
            long totalReplicaCount = 0;
            List<Table> tables = db.getTables();
            // Sort tables by name for stable, deterministic output.
            SortedSet<Table> sortedTables = new TreeSet<>(Comparator.comparing(Table::getName));
            for (Table table : tables) {
                // Only include tables the current user is allowed to see,
                // under whichever privilege model is active.
                if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
                    if (!PrivilegeManager.checkAnyActionOnTable(connectContext, dbName, table.getName())) {
                        continue;
                    }
                } else if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
                        dbName, table.getName(), PrivPredicate.SHOW)) {
                    continue;
                }
                sortedTables.add(table);
            }
            for (Table table : sortedTables) {
                // Size/replica statistics only exist for native tables.
                if (!table.isNativeTable()) {
                    continue;
                }
                OlapTable olapTable = (OlapTable) table;
                long tableSize = olapTable.getDataSize();
                long replicaCount = olapTable.getReplicaCount();
                // Render the byte count as a human-readable "<value> <unit>" pair.
                Pair<Double, String> tableSizePair = DebugUtil.getByteUint(tableSize);
                String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(tableSizePair.first) + " "
                        + tableSizePair.second;
                List<String> row = Arrays.asList(table.getName(), readableSize, String.valueOf(replicaCount));
                totalRows.add(row);
                totalSize += tableSize;
                totalReplicaCount += replicaCount;
            }
            // Summary row: totals across all listed tables.
            Pair<Double, String> totalSizePair = DebugUtil.getByteUint(totalSize);
            String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + " "
                    + totalSizePair.second;
            List<String> total = Arrays.asList("Total", readableSize, String.valueOf(totalReplicaCount));
            totalRows.add(total);
            // Quota row: configured data/replica quotas of the database.
            long quota = db.getDataQuota();
            long replicaQuota = db.getReplicaQuota();
            Pair<Double, String> quotaPair = DebugUtil.getByteUint(quota);
            String readableQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(quotaPair.first) + " "
                    + quotaPair.second;
            List<String> quotaRow = Arrays.asList("Quota", readableQuota, String.valueOf(replicaQuota));
            totalRows.add(quotaRow);
            // Left row: remaining headroom, clamped at zero.
            long left = Math.max(0, quota - totalSize);
            long replicaCountLeft = Math.max(0, replicaQuota - totalReplicaCount);
            Pair<Double, String> leftPair = DebugUtil.getByteUint(left);
            String readableLeft = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(leftPair.first) + " "
                    + leftPair.second;
            List<String> leftRow = Arrays.asList("Left", readableLeft, String.valueOf(replicaCountLeft));
            totalRows.add(leftRow);
        } else {
            // Single-table form: privilege failure is an error here (not a skip).
            if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
                if (!PrivilegeManager.checkAnyActionOnTable(connectContext, dbName, tableName)) {
                    ErrorReport.reportSemanticException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW DATA",
                            connectContext.getQualifiedUser(),
                            connectContext.getRemoteIP(),
                            tableName);
                }
            } else if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
                    dbName, tableName, PrivPredicate.SHOW)) {
                ErrorReport.reportSemanticException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW DATA",
                        connectContext.getQualifiedUser(),
                        connectContext.getRemoteIP(),
                        tableName);
            }
            Table table = db.getTable(tableName);
            if (table == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);
            }
            if (!table.isLocalTable()) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, tableName);
            }
            OlapTable olapTable = (OlapTable) table;
            int i = 0;
            long totalSize = 0;
            long totalReplicaCount = 0;
            // Iterate indexes in name order (TreeMap) for stable output.
            Map<String, Long> indexNames = olapTable.getIndexNameToId();
            Map<String, Long> sortedIndexNames = new TreeMap<String, Long>(indexNames);
            for (Long indexId : sortedIndexNames.values()) {
                long indexSize = 0;
                long indexReplicaCount = 0;
                long indexRowCount = 0;
                // Aggregate the index's stats across all partitions.
                for (Partition partition : olapTable.getAllPartitions()) {
                    MaterializedIndex mIndex = partition.getIndex(indexId);
                    indexSize += mIndex.getDataSize();
                    indexReplicaCount += mIndex.getReplicaCount();
                    indexRowCount += mIndex.getRowCount();
                }
                Pair<Double, String> indexSizePair = DebugUtil.getByteUint(indexSize);
                String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(indexSizePair.first) + " "
                        + indexSizePair.second;
                List<String> row = null;
                // Only the first row repeats the table name; later rows leave it blank.
                if (i == 0) {
                    row = Arrays.asList(tableName,
                            olapTable.getIndexNameById(indexId),
                            readableSize, String.valueOf(indexReplicaCount),
                            String.valueOf(indexRowCount));
                } else {
                    row = Arrays.asList("",
                            olapTable.getIndexNameById(indexId),
                            readableSize, String.valueOf(indexReplicaCount),
                            String.valueOf(indexRowCount));
                }
                totalSize += indexSize;
                totalReplicaCount += indexReplicaCount;
                totalRows.add(row);
                i++;
            }
            Pair<Double, String> totalSizePair = DebugUtil.getByteUint(totalSize);
            String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + " "
                    + totalSizePair.second;
            List<String> row = Arrays.asList("", "Total", readableSize, String.valueOf(totalReplicaCount), "");
            totalRows.add(row);
        }
    } catch (AnalysisException e) {
        // Re-raise as the unchecked SemanticException used by this code path.
        throw new SemanticException(e.getMessage());
    } finally {
        db.readUnlock();
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getResultRows());
}
/** Handles SHOW PARTITIONS: delegates to the partitions proc dir with filter/order/limit. */
private void handleShowPartitions() throws AnalysisException {
    ShowPartitionsStmt showStmt = (ShowPartitionsStmt) stmt;
    ProcNodeInterface node = showStmt.getNode();
    Preconditions.checkNotNull(node);
    List<List<String>> rows = ((PartitionsProcDir) node).fetchResultByFilter(showStmt.getFilterMap(),
            showStmt.getOrderByPairs(), showStmt.getLimitElement()).getRows();
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/**
 * Handles SHOW TABLET.
 * Single-tablet form: resolves the tablet's db/table/partition/index names via
 * the inverted index, reports whether the inverted index agrees with the
 * catalog (IsSync), and emits a SHOW PROC command for drilling down.
 * Table form: lists tablets of a native table, optionally restricted to given
 * partitions / index / version / backend / replica state, then sorts and
 * windows the rows by ORDER BY and LIMIT/OFFSET.
 */
private void handleShowTablet() throws AnalysisException {
    ShowTabletStmt showStmt = (ShowTabletStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();

    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    if (showStmt.isShowSingleTablet()) {
        long tabletId = showStmt.getTabletId();
        TabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentInvertedIndex();
        TabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);
        // Unknown tablets fall back to NOT_EXIST_VALUE ids with null names.
        Long dbId = tabletMeta != null ? tabletMeta.getDbId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String dbName = null;
        Long tableId = tabletMeta != null ? tabletMeta.getTableId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String tableName = null;
        Long partitionId = tabletMeta != null ? tabletMeta.getPartitionId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String partitionName = null;
        Long indexId = tabletMeta != null ? tabletMeta.getIndexId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String indexName = null;
        Boolean isSync = true;
        // do/while(false) is used as a structured "goto end" so each failed
        // lookup can mark isSync=false and bail out early.
        do {
            Database db = globalStateMgr.getDb(dbId);
            if (db == null) {
                isSync = false;
                break;
            }
            dbName = db.getFullName();
            db.readLock();
            try {
                Table table = db.getTable(tableId);
                if (table == null || !(table instanceof OlapTable)) {
                    isSync = false;
                    break;
                }
                tableName = table.getName();
                OlapTable olapTable = (OlapTable) table;
                Partition partition = olapTable.getPartition(partitionId);
                if (partition == null) {
                    isSync = false;
                    break;
                }
                partitionName = partition.getName();
                MaterializedIndex index = partition.getIndex(indexId);
                if (index == null) {
                    isSync = false;
                    break;
                }
                indexName = olapTable.getIndexNameById(indexId);

                // Lake tablets have no local replicas to cross-check; stop here.
                if (table.isLakeTable()) {
                    break;
                }

                LocalTablet tablet = (LocalTablet) index.getTablet(tabletId);
                if (tablet == null) {
                    isSync = false;
                    break;
                }

                // IsSync means every catalog replica is the same object the
                // inverted index holds for this tablet/backend.
                List<Replica> replicas = tablet.getImmutableReplicas();
                for (Replica replica : replicas) {
                    Replica tmp = invertedIndex.getReplica(tabletId, replica.getBackendId());
                    if (tmp == null) {
                        isSync = false;
                        break;
                    }
                    // Compare references on purpose: same id but different
                    // object still counts as out of sync.
                    if (tmp != replica) {
                        isSync = false;
                        break;
                    }
                }
            } finally {
                db.readUnlock();
            }
        } while (false);

        String detailCmd = String.format("SHOW PROC '/dbs/%d/%d/partitions/%d/%d/%d';",
                dbId, tableId, partitionId, indexId, tabletId);
        rows.add(Lists.newArrayList(dbName, tableName, partitionName, indexName,
                dbId.toString(), tableId.toString(),
                partitionId.toString(), indexId.toString(),
                isSync.toString(), detailCmd));
    } else {
        Database db = globalStateMgr.getDb(showStmt.getDbName());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());

        db.readLock();
        try {
            Table table = db.getTable(showStmt.getTableName());
            if (table == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTableName());
            }
            if (!table.isNativeTable()) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, showStmt.getTableName());
            }

            OlapTable olapTable = (OlapTable) table;
            // sizeLimit is the end of the window: offset + limit rows in total.
            long sizeLimit = -1;
            if (showStmt.hasOffset() && showStmt.hasLimit()) {
                sizeLimit = showStmt.getOffset() + showStmt.getLimit();
            } else if (showStmt.hasLimit()) {
                sizeLimit = showStmt.getLimit();
            }
            boolean stop = false;

            // Resolve the partition set to scan (explicit names or all).
            Collection<Partition> partitions = new ArrayList<Partition>();
            if (showStmt.hasPartition()) {
                PartitionNames partitionNames = showStmt.getPartitionNames();
                for (String partName : partitionNames.getPartitionNames()) {
                    Partition partition = olapTable.getPartition(partName, partitionNames.isTemp());
                    if (partition == null) {
                        throw new AnalysisException("Unknown partition: " + partName);
                    }
                    partitions.add(partition);
                }
            } else {
                partitions = olapTable.getPartitions();
            }
            List<List<Comparable>> tabletInfos = new ArrayList<>();
            String indexName = showStmt.getIndexName();
            long indexId = -1;
            if (indexName != null) {
                Long id = olapTable.getIndexIdByName(indexName);
                if (id == null) {
                    // invalid indexName
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getIndexName());
                }
                indexId = id;
            }
            // Collect tablet rows per partition/index until the window is full.
            for (Partition partition : partitions) {
                if (stop) {
                    break;
                }
                for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {
                    if (indexId > -1 && index.getId() != indexId) {
                        continue;
                    }
                    if (olapTable.isLakeTable()) {
                        LakeTabletsProcNode procNode = new LakeTabletsProcNode(db, (LakeTable) olapTable, index);
                        tabletInfos.addAll(procNode.fetchComparableResult());
                    } else {
                        LocalTabletsProcDir procDir = new LocalTabletsProcDir(db, olapTable, index);
                        tabletInfos.addAll(procDir.fetchComparableResult(
                                showStmt.getVersion(), showStmt.getBackendId(), showStmt.getReplicaState()));
                    }
                    if (sizeLimit > -1 && tabletInfos.size() >= sizeLimit) {
                        stop = true;
                        break;
                    }
                }
            }

            // NOTE(review): when fewer rows exist than offset+limit, ALL rows
            // are dropped — this looks like it treats "not enough rows" as
            // "offset past the end"; verify against the intended LIMIT/OFFSET
            // semantics.
            if (sizeLimit > -1 && tabletInfos.size() < sizeLimit) {
                tabletInfos.clear();
            } else if (sizeLimit > -1) {
                tabletInfos = tabletInfos.subList((int) showStmt.getOffset(), (int) sizeLimit);
            }

            // Sort by requested ORDER BY columns, else by columns 0 and 1.
            List<OrderByPair> orderByPairs = showStmt.getOrderByPairs();
            ListComparator<List<Comparable>> comparator = null;
            if (orderByPairs != null) {
                OrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];
                comparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));
            } else {
                // order by tabletId, replicaId
                comparator = new ListComparator<>(0, 1);
            }
            Collections.sort(tabletInfos, comparator);

            for (List<Comparable> tabletInfo : tabletInfos) {
                List<String> oneTablet = new ArrayList<String>(tabletInfo.size());
                for (Comparable column : tabletInfo) {
                    oneTablet.add(column.toString());
                }
                rows.add(oneTablet);
            }
        } finally {
            db.readUnlock();
        }
    }

    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/** Handles SHOW BROKER: returns the broker manager's info rows verbatim. */
private void handleShowBroker() {
    ShowBrokerStmt brokerStmt = (ShowBrokerStmt) stmt;
    resultSet = new ShowResultSet(brokerStmt.getMetaData(),
            GlobalStateMgr.getCurrentState().getBrokerMgr().getBrokersInfo());
}
/** Handles SHOW RESOURCES: returns the resource manager's info rows verbatim. */
private void handleShowResources() {
    ShowResourcesStmt resourcesStmt = (ShowResourcesStmt) stmt;
    resultSet = new ShowResultSet(resourcesStmt.getMetaData(),
            GlobalStateMgr.getCurrentState().getResourceMgr().getResourcesInfo());
}
/**
 * Handles SHOW EXPORT: lists export jobs of the database, optionally filtered
 * by job id, state or query id, with ORDER BY and LIMIT applied by the
 * export manager.
 */
private void handleShowExport() throws AnalysisException {
    ShowExportStmt showStmt = (ShowExportStmt) stmt;
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    Database db = globalStateMgr.getDb(showStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());

    ExportJob.JobState state = showStmt.getJobState();
    Set<ExportJob.JobState> states = state == null ? null : Sets.newHashSet(state);
    List<List<String>> infos = globalStateMgr.getExportMgr().getExportJobInfosByIdOrState(
            db.getId(), showStmt.getJobId(), states, showStmt.getQueryId(),
            showStmt.getOrderByPairs(), showStmt.getLimit());
    resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
}
/** Handles SHOW BACKENDS: returns the cluster backend info rows verbatim. */
private void handleShowBackends() {
    final ShowBackendsStmt backendsStmt = (ShowBackendsStmt) stmt;
    resultSet = new ShowResultSet(backendsStmt.getMetaData(), BackendsProcDir.getClusterBackendInfos());
}
/** Handles SHOW FRONTENDS: collects frontend info rows from the proc node. */
private void handleShowFrontends() {
    final ShowFrontendsStmt frontendsStmt = (ShowFrontendsStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    FrontendsProcNode.getFrontendsInfo(GlobalStateMgr.getCurrentState(), rows);
    resultSet = new ShowResultSet(frontendsStmt.getMetaData(), rows);
}
/** Handles SHOW REPOSITORIES: returns the repository manager's info rows verbatim. */
private void handleShowRepositories() {
    final ShowRepositoriesStmt repoStmt = (ShowRepositoriesStmt) stmt;
    resultSet = new ShowResultSet(repoStmt.getMetaData(),
            GlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getReposInfo());
}
/** Handles SHOW SNAPSHOT: lists the snapshots of the named repository. */
private void handleShowSnapshot() throws AnalysisException {
    final ShowSnapshotStmt snapshotStmt = (ShowSnapshotStmt) stmt;
    Repository repo = GlobalStateMgr.getCurrentState().getBackupHandler()
            .getRepoMgr().getRepo(snapshotStmt.getRepoName());
    if (repo == null) {
        throw new AnalysisException("Repository " + snapshotStmt.getRepoName() + " does not exist");
    }
    resultSet = new ShowResultSet(snapshotStmt.getMetaData(),
            repo.getSnapshotInfos(snapshotStmt.getSnapshotName(), snapshotStmt.getTimestamp()));
}
/**
 * SHOW BACKUP: report the current backup job of the database, or an empty set
 * when no backup job exists or the user lacks EXPORT on any involved table.
 *
 * @throws AnalysisException if the database does not exist
 */
private void handleShowBackup() throws AnalysisException {
    ShowBackupStmt showBackupStmt = (ShowBackupStmt) stmt;
    Database database = GlobalStateMgr.getCurrentState().getDb(showBackupStmt.getDbName());
    MetaUtils.checkDbNullAndReport(database, showBackupStmt.getDbName());
    AbstractJob job = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(database.getId());
    if (!(job instanceof BackupJob)) {
        resultSet = new ShowResultSet(showBackupStmt.getMetaData(), EMPTY_SET);
        return;
    }
    BackupJob backupJob = (BackupJob) job;
    if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
        // Check EXPORT on every table of the job (all tables are evaluated,
        // matching the original non-short-circuiting behavior).
        boolean denied = false;
        for (TableRef tableRef : backupJob.getTableRef()) {
            TableName tableName = tableRef.getName();
            if (!PrivilegeManager.checkTableAction(connectContext, tableName.getDb(), tableName.getTbl(),
                    PrivilegeType.EXPORT)) {
                denied = true;
            }
        }
        if (denied) {
            resultSet = new ShowResultSet(showBackupStmt.getMetaData(), EMPTY_SET);
            return;
        }
    }
    List<List<String>> rows = Lists.newArrayList();
    rows.add(backupJob.getInfo());
    resultSet = new ShowResultSet(showBackupStmt.getMetaData(), rows);
}
/**
 * SHOW RESTORE: report the current restore job of the database, or an empty
 * set when the database has no active restore job.
 *
 * @throws AnalysisException if the database does not exist
 */
private void handleShowRestore() throws AnalysisException {
    ShowRestoreStmt showRestoreStmt = (ShowRestoreStmt) stmt;
    Database database = GlobalStateMgr.getCurrentState().getDb(showRestoreStmt.getDbName());
    MetaUtils.checkDbNullAndReport(database, showRestoreStmt.getDbName());
    AbstractJob job = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(database.getId());
    if (job instanceof RestoreJob) {
        List<List<String>> rows = Lists.newArrayList();
        rows.add(((RestoreJob) job).getInfo());
        resultSet = new ShowResultSet(showRestoreStmt.getMetaData(), rows);
    } else {
        resultSet = new ShowResultSet(showRestoreStmt.getMetaData(), EMPTY_SET);
    }
}
/**
 * SHOW GRANTS [FOR role | FOR user]: lists inherited roles and privilege
 * entries under the new privilege framework, or delegates to the legacy Auth
 * module otherwise.
 *
 * <p>Row layout: identity, catalog column (null = all catalogs, "default" for
 * non-catalog objects, otherwise the catalog name), serialized GRANT text.
 */
private void handleShowGrants() {
    ShowGrantsStmt showStmt = (ShowGrantsStmt) stmt;
    if (!GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
        // Legacy privilege framework renders the rows itself.
        List<List<String>> infos = GlobalStateMgr.getCurrentState().getAuth().getGrantsSQLs(showStmt.getUserIdent());
        resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
        return;
    }
    PrivilegeManager privilegeManager = GlobalStateMgr.getCurrentState().getPrivilegeManager();
    try {
        List<List<String>> infos = showStmt.getRole() != null
                ? buildRoleGrantRows(privilegeManager, showStmt.getRole())
                : buildUserGrantRows(privilegeManager, showStmt.getUserIdent());
        resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
    } catch (PrivilegeException e) {
        throw new SemanticException(e.getMessage());
    }
}

// Builds SHOW GRANTS rows for a role: its parent-role grants plus its own
// privilege entries.
private List<List<String>> buildRoleGrantRows(PrivilegeManager privilegeManager, String roleName)
        throws PrivilegeException {
    List<List<String>> infos = new ArrayList<>();
    Long roleId = privilegeManager.getRoleIdByNameAllowNull(roleName);
    if (roleId == null) {
        throw new SemanticException("There is no such grant defined for role " + roleName);
    }
    RolePrivilegeCollection rolePrivilegeCollection =
            privilegeManager.getRolePrivilegeCollectionUnlocked(roleId, true);
    List<String> parentRoleNameList = new ArrayList<>();
    for (Long parentRoleId : rolePrivilegeCollection.getParentRoleIds()) {
        RolePrivilegeCollection parentRolePriv =
                privilegeManager.getRolePrivilegeCollectionUnlocked(parentRoleId, true);
        parentRoleNameList.add(parentRolePriv.getName());
        // NOTE(review): the list keeps growing, so each emitted row repeats all
        // parents seen so far; behavior preserved as-is — confirm it is intended.
        infos.add(Lists.newArrayList(roleName, null,
                AstToSQLBuilder.toSQL(new GrantRoleStmt(parentRoleNameList, roleName))));
    }
    for (Map.Entry<Short, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntry
            : rolePrivilegeCollection.getTypeToPrivilegeEntryList().entrySet()) {
        for (PrivilegeCollection.PrivilegeEntry privilegeEntry : typeToPrivilegeEntry.getValue()) {
            List<String> info = new ArrayList<>();
            info.add(roleName);
            ObjectType objectType = privilegeManager.getObjectType(typeToPrivilegeEntry.getKey());
            if (objectType.equals(ObjectType.CATALOG)) {
                info.add(resolveCatalogName((CatalogPEntryObject) privilegeEntry.getObject()));
            } else {
                info.add("default");
            }
            GrantPrivilegeStmt grantPrivilegeStmt =
                    new GrantPrivilegeStmt(new ArrayList<>(), objectType.name(), roleName);
            grantPrivilegeStmt.setObjectType(objectType);
            grantPrivilegeStmt.setActionList(privilegeEntry.getActionSet());
            grantPrivilegeStmt.setObjectList(Lists.newArrayList(privilegeEntry.getObject()));
            info.add(AstToStringBuilder.toString(grantPrivilegeStmt));
            infos.add(info);
        }
    }
    return infos;
}

// Builds SHOW GRANTS rows for a user: role memberships plus direct privilege
// entries.
private List<List<String>> buildUserGrantRows(PrivilegeManager privilegeManager, UserIdentity userIdentity)
        throws PrivilegeException {
    List<List<String>> infos = new ArrayList<>();
    UserPrivilegeCollection userPrivilegeCollection =
            privilegeManager.getUserPrivilegeCollectionUnlocked(userIdentity);
    Set<Long> allRoles = userPrivilegeCollection.getAllRoles();
    if (!allRoles.isEmpty()) {
        infos.add(Lists.newArrayList(userIdentity.toString(), null, AstToSQLBuilder.toSQL(
                new GrantRoleStmt(allRoles.stream().map(roleId -> {
                    try {
                        return privilegeManager
                                .getRolePrivilegeCollectionUnlocked(roleId, true).getName();
                    } catch (PrivilegeException e) {
                        throw new RuntimeException(e);
                    }
                }).collect(Collectors.toList()), userIdentity))));
    }
    for (Map.Entry<Short, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntry
            : userPrivilegeCollection.getTypeToPrivilegeEntryList().entrySet()) {
        for (PrivilegeCollection.PrivilegeEntry privilegeEntry : typeToPrivilegeEntry.getValue()) {
            List<String> info = new ArrayList<>();
            info.add(userIdentity.toString());
            ObjectType objectType = privilegeManager.getObjectType(typeToPrivilegeEntry.getKey());
            if (objectType.equals(ObjectType.CATALOG)) {
                CatalogPEntryObject catalogPEntryObject = (CatalogPEntryObject) privilegeEntry.getObject();
                String catalogName = resolveCatalogName(catalogPEntryObject);
                info.add(catalogName);
                if (catalogName != null) {
                    List<String> privList = new ArrayList<>();
                    for (Map.Entry<String, Action> actionEntry
                            : ObjectType.CATALOG.getActionMap().entrySet()) {
                        if (privilegeEntry.getActionSet().contains(actionEntry.getValue())) {
                            privList.add(actionEntry.getValue().getName());
                        }
                    }
                    // NOTE(review): named-catalog rows get this extra serialized-grant
                    // column in addition to the common one appended below, making them
                    // one column wider than other rows; preserved as-is — confirm.
                    info.add(AstToStringBuilder.toString(
                            new GrantPrivilegeStmt(privList, ObjectType.CATALOG.name(), userIdentity)));
                }
            } else {
                info.add("default");
            }
            GrantPrivilegeStmt grantPrivilegeStmt =
                    new GrantPrivilegeStmt(new ArrayList<>(), objectType.name(), userIdentity);
            grantPrivilegeStmt.setObjectType(objectType);
            grantPrivilegeStmt.setActionList(privilegeEntry.getActionSet());
            grantPrivilegeStmt.setObjectList(Lists.newArrayList(privilegeEntry.getObject()));
            info.add(AstToStringBuilder.toString(grantPrivilegeStmt));
            infos.add(info);
        }
    }
    return infos;
}

// Maps a catalog privilege-entry object to its display name: null means "all
// catalogs"; otherwise the catalog's name, or SemanticException if unknown.
private String resolveCatalogName(CatalogPEntryObject catalogPEntryObject) {
    if (catalogPEntryObject.getId() == CatalogPEntryObject.ALL_CATALOG_ID) {
        return null;
    }
    Optional<Catalog> catalogOptional =
            GlobalStateMgr.getCurrentState().getCatalogMgr().getCatalogs().values().stream()
                    .filter(catalog -> catalog.getId() == catalogPEntryObject.getId())
                    .findFirst();
    if (!catalogOptional.isPresent()) {
        throw new SemanticException("can't find catalog");
    }
    return catalogOptional.get().getName();
}
/**
 * SHOW ROLES: one row (role name) per role known to the active privilege
 * framework.
 *
 * @throws AnalysisException if a role's privilege collection cannot be read
 */
private void handleShowRoles() throws AnalysisException {
    ShowRolesStmt showRolesStmt = (ShowRolesStmt) stmt;
    if (!GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
        // Legacy framework renders its own role rows.
        resultSet = new ShowResultSet(showRolesStmt.getMetaData(),
                GlobalStateMgr.getCurrentState().getAuth().getRoleInfo());
        return;
    }
    try {
        PrivilegeManager privilegeManager = GlobalStateMgr.getCurrentState().getPrivilegeManager();
        List<List<String>> rows = new ArrayList<>();
        for (Long roleId : privilegeManager.getRoleIdToPrivilegeCollection().keySet()) {
            RolePrivilegeCollection collection =
                    privilegeManager.getRolePrivilegeCollectionUnlocked(roleId, true);
            rows.add(Lists.newArrayList(collection.getName()));
        }
        resultSet = new ShowResultSet(showRolesStmt.getMetaData(), rows);
    } catch (PrivilegeException e) {
        throw new AnalysisException(e.getMessage());
    }
}
// SHOW [ALL] USERS: all known user identities, or just the session's current
// user when ALL is not requested.
private void handleShowUser() {
    ShowUserStmt showUserStmt = (ShowUserStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    if (showUserStmt.isAll()) {
        for (UserIdentity userIdentity : GlobalStateMgr.getCurrentState()
                .getPrivilegeManager().getUserToPrivilegeCollection().keySet()) {
            rows.add(Lists.newArrayList(userIdentity.toString()));
        }
    } else {
        rows.add(Lists.newArrayList(connectContext.getCurrentUserIdentity().toString()));
    }
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
/**
 * ADMIN SHOW REPLICA STATUS: delegates to MetadataViewer and rewraps DDL
 * failures as analysis errors.
 *
 * @throws AnalysisException if the metadata lookup fails
 */
private void handleAdminShowTabletStatus() throws AnalysisException {
    AdminShowReplicaStatusStmt statusStmt = (AdminShowReplicaStatusStmt) stmt;
    try {
        resultSet = new ShowResultSet(statusStmt.getMetaData(), MetadataViewer.getTabletStatus(statusStmt));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
/**
 * ADMIN SHOW REPLICA DISTRIBUTION: delegates to MetadataViewer and rewraps
 * DDL failures as analysis errors.
 *
 * @throws AnalysisException if the metadata lookup fails
 */
private void handleAdminShowTabletDistribution() throws AnalysisException {
    AdminShowReplicaDistributionStmt distributionStmt = (AdminShowReplicaDistributionStmt) stmt;
    try {
        resultSet = new ShowResultSet(distributionStmt.getMetaData(),
                MetadataViewer.getTabletDistribution(distributionStmt));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
/**
 * ADMIN SHOW CONFIG: FE config items, optionally filtered by a MySQL-style
 * pattern, sorted alphabetically by config name.
 *
 * @throws AnalysisException if the config lookup fails
 */
private void handleAdminShowConfig() throws AnalysisException {
    AdminShowConfigStmt showConfigStmt = (AdminShowConfigStmt) stmt;
    try {
        PatternMatcher matcher = null;
        if (showConfigStmt.getPattern() != null) {
            matcher = PatternMatcher.createMysqlPattern(showConfigStmt.getPattern(),
                    CaseSensibility.CONFIG.getCaseSensibility());
        }
        List<List<String>> rows = ConfigBase.getConfigInfo(matcher);
        // Alphabetical by config name (first column).
        rows.sort(Comparator.comparing(row -> row.get(0)));
        resultSet = new ShowResultSet(showConfigStmt.getMetaData(), rows);
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
/**
 * SHOW FILE: lists small files registered in the given database.
 *
 * @throws AnalysisException if the database lookup fails
 */
private void handleShowSmallFiles() throws AnalysisException {
    ShowSmallFilesStmt showSmallFilesStmt = (ShowSmallFilesStmt) stmt;
    try {
        resultSet = new ShowResultSet(showSmallFilesStmt.getMetaData(),
                GlobalStateMgr.getCurrentState().getSmallFileMgr().getInfo(showSmallFilesStmt.getDbName()));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
/**
 * SHOW DYNAMIC PARTITION TABLES: one row per OLAP table in the database that
 * has a dynamic-partition policy, combining its configuration with the
 * scheduler's latest runtime info. Tables the current user may not access are
 * skipped; tables that dropped the policy have their runtime info evicted.
 */
private void handleShowDynamicPartition() {
    ShowDynamicPartitionStmt showDynamicPartitionStmt = (ShowDynamicPartitionStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    Database db = connectContext.getGlobalStateMgr().getDb(showDynamicPartitionStmt.getDb());
    if (db == null) {
        // Unknown database: keep legacy behavior (resultSet left untouched).
        return;
    }
    db.readLock();
    try {
        for (Table tbl : db.getTables()) {
            if (!(tbl instanceof OlapTable)) {
                continue;
            }
            DynamicPartitionScheduler dynamicPartitionScheduler =
                    GlobalStateMgr.getCurrentState().getDynamicPartitionScheduler();
            OlapTable olapTable = (OlapTable) tbl;
            if (!olapTable.dynamicPartitionExists()) {
                // Drop stale runtime info for tables no longer using dynamic partitioning.
                dynamicPartitionScheduler.removeRuntimeInfo(olapTable.getName());
                continue;
            }
            // Visibility check under whichever privilege framework is active.
            if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
                if (!PrivilegeManager.checkAnyActionOnTable(ConnectContext.get(),
                        db.getFullName(), olapTable.getName())) {
                    continue;
                }
            } else {
                if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
                        db.getFullName(), olapTable.getName(),
                        PrivPredicate.SHOW)) {
                    continue;
                }
            }
            DynamicPartitionProperty dynamicPartitionProperty =
                    olapTable.getTableProperty().getDynamicPartitionProperty();
            String tableName = olapTable.getName();
            int replicationNum = dynamicPartitionProperty.getReplicationNum();
            // BUGFIX: when the property explicitly sets a replication number, report
            // that value. Previously the global FeConstants.default_replication_num
            // was shown instead, hiding the table's configured value.
            if (replicationNum == DynamicPartitionProperty.NOT_SET_REPLICATION_NUM) {
                replicationNum = olapTable.getDefaultReplicationNum();
            }
            rows.add(Lists.newArrayList(
                    tableName,
                    String.valueOf(dynamicPartitionProperty.getEnable()),
                    dynamicPartitionProperty.getTimeUnit().toUpperCase(),
                    String.valueOf(dynamicPartitionProperty.getStart()),
                    String.valueOf(dynamicPartitionProperty.getEnd()),
                    dynamicPartitionProperty.getPrefix(),
                    String.valueOf(dynamicPartitionProperty.getBuckets()),
                    String.valueOf(replicationNum),
                    dynamicPartitionProperty.getStartOfInfo(),
                    dynamicPartitionScheduler
                            .getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_UPDATE_TIME),
                    dynamicPartitionScheduler
                            .getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_SCHEDULER_TIME),
                    dynamicPartitionScheduler
                            .getRuntimeInfo(tableName, DynamicPartitionScheduler.DYNAMIC_PARTITION_STATE),
                    dynamicPartitionScheduler
                            .getRuntimeInfo(tableName, DynamicPartitionScheduler.CREATE_PARTITION_MSG),
                    dynamicPartitionScheduler
                            .getRuntimeInfo(tableName, DynamicPartitionScheduler.DROP_PARTITION_MSG)));
        }
    } finally {
        db.readUnlock();
    }
    resultSet = new ShowResultSet(showDynamicPartitionStmt.getMetaData(), rows);
}
/**
 * SHOW TRANSACTION: details of a single transaction id within a database.
 *
 * @throws AnalysisException if the database does not exist
 */
private void handleShowTransaction() throws AnalysisException {
    ShowTransactionStmt showTransactionStmt = (ShowTransactionStmt) stmt;
    Database database = connectContext.getGlobalStateMgr().getDb(showTransactionStmt.getDbName());
    MetaUtils.checkDbNullAndReport(database, showTransactionStmt.getDbName());
    resultSet = new ShowResultSet(showTransactionStmt.getMetaData(),
            GlobalStateMgr.getCurrentGlobalTransactionMgr()
                    .getSingleTranInfo(database.getId(), showTransactionStmt.getTxnId()));
}
// SHOW PLUGINS: render the plugin manager's info rows as-is.
private void handleShowPlugins() throws AnalysisException {
    ShowPluginsStmt showPluginsStmt = (ShowPluginsStmt) stmt;
    resultSet = new ShowResultSet(showPluginsStmt.getMetaData(),
            GlobalStateMgr.getCurrentPluginMgr().getPluginShowInfos());
}
// SHOW SQLBLACKLIST: one row (id, pattern) per entry in the SQL black list.
private void handleShowSqlBlackListStmt() throws AnalysisException {
    ShowSqlBlackListStmt showSqlBlackListStmt = (ShowSqlBlackListStmt) stmt;
    List<List<String>> rows = new ArrayList<>();
    SqlBlackList.getInstance().sqlBlackListMap.forEach((sql, blackListSql) -> {
        List<String> row = new ArrayList<>();
        row.add(String.valueOf(blackListSql.id));
        row.add(sql);
        rows.add(row);
    });
    resultSet = new ShowResultSet(showSqlBlackListStmt.getMetaData(), rows);
}
// SHOW ANALYZE JOB: all analyze jobs ordered by id, filtered by the optional
// statement predicate; jobs whose metadata is gone are skipped best-effort.
private void handleShowAnalyzeJob() {
    List<AnalyzeJob> jobs = connectContext.getGlobalStateMgr().getAnalyzeManager().getAllAnalyzeJobList();
    jobs.sort(Comparator.comparing(AnalyzeJob::getId));
    List<List<String>> rows = Lists.newArrayList();
    for (AnalyzeJob job : jobs) {
        try {
            List<String> row = ShowAnalyzeJobStmt.showAnalyzeJobs(connectContext, job);
            if (row != null) {
                rows.add(row);
            }
        } catch (MetaNotFoundException ignored) {
            // Best effort: skip jobs referencing dropped metadata.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// SHOW ANALYZE STATUS: all analyze statuses ordered by id, filtered by the
// optional statement predicate; stale statuses are skipped best-effort.
private void handleShowAnalyzeStatus() {
    List<AnalyzeStatus> statuses = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeManager()
            .getAnalyzeStatusMap().values());
    statuses.sort(Comparator.comparing(AnalyzeStatus::getId));
    List<List<String>> rows = Lists.newArrayList();
    for (AnalyzeStatus status : statuses) {
        try {
            List<String> row = ShowAnalyzeStatusStmt.showAnalyzeStatus(connectContext, status);
            if (row != null) {
                rows.add(row);
            }
        } catch (MetaNotFoundException ignored) {
            // Best effort: skip statuses referencing dropped metadata.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// SHOW STATS META: basic statistics metadata rows, filtered by the optional
// statement predicate; metas for dropped tables are skipped best-effort.
private void handleShowBasicStatsMeta() {
    List<BasicStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeManager()
            .getBasicStatsMetaMap().values());
    List<List<String>> rows = Lists.newArrayList();
    for (BasicStatsMeta meta : metas) {
        try {
            List<String> row = ShowBasicStatsMetaStmt.showBasicStatsMeta(connectContext, meta);
            if (row != null) {
                rows.add(row);
            }
        } catch (MetaNotFoundException ignored) {
            // Best effort: skip metas referencing dropped metadata.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// SHOW HISTOGRAM META: histogram statistics metadata rows, filtered by the
// optional statement predicate; stale metas are skipped best-effort.
private void handleShowHistogramStatsMeta() {
    List<HistogramStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeManager()
            .getHistogramStatsMetaMap().values());
    List<List<String>> rows = Lists.newArrayList();
    for (HistogramStatsMeta meta : metas) {
        try {
            List<String> row = ShowHistogramStatsMetaStmt.showHistogramStatsMeta(connectContext, meta);
            if (row != null) {
                rows.add(row);
            }
        } catch (MetaNotFoundException ignored) {
            // Best effort: skip metas referencing dropped metadata.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// SHOW RESOURCE GROUP: delegate row rendering to the resource group manager.
private void handleShowResourceGroup() throws AnalysisException {
    ShowResourceGroupStmt showResourceGroupStmt = (ShowResourceGroupStmt) stmt;
    resultSet = new ShowResultSet(showResourceGroupStmt.getMetaData(),
            GlobalStateMgr.getCurrentState().getResourceGroupMgr().showResourceGroup(showResourceGroupStmt));
}
// SHOW CATALOGS: catalogs visible to the user (any-action check under the new
// privilege framework), sorted by catalog name.
private void handleShowCatalogs() {
    ShowCatalogsStmt showCatalogsStmt = (ShowCatalogsStmt) stmt;
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    List<List<String>> rows = new ArrayList<>();
    for (List<String> row : globalStateMgr.getCatalogMgr().getCatalogsInfo()) {
        if (globalStateMgr.isUsingNewPrivilege()
                && !PrivilegeManager.checkAnyActionOnCatalog(connectContext, row.get(0))) {
            continue;
        }
        rows.add(row);
    }
    rows.sort(Comparator.comparing(row -> row.get(0)));
    resultSet = new ShowResultSet(showCatalogsStmt.getMetaData(), rows);
}
// SHOW WAREHOUSES: all warehouses, sorted by name (first column).
private void handleShowWarehouses() {
    ShowWarehousesStmt showWarehousesStmt = (ShowWarehousesStmt) stmt;
    List<List<String>> rows = new ArrayList<>(
            GlobalStateMgr.getCurrentState().getWarehouseMgr().getWarehousesInfo());
    rows.sort(Comparator.comparing(row -> row.get(0)));
    resultSet = new ShowResultSet(showWarehousesStmt.getMetaData(), rows);
}
// SHOW CLUSTERS: clusters of the named warehouse, sorted by first column.
private void handleShowClusters() {
    ShowClustersStmt showClustersStmt = (ShowClustersStmt) stmt;
    Warehouse warehouse = GlobalStateMgr.getCurrentState().getWarehouseMgr()
            .getWarehouse(showClustersStmt.getWarehouseName());
    List<List<String>> rows = new ArrayList<>(warehouse.getClustersInfo());
    rows.sort(Comparator.comparing(row -> row.get(0)));
    resultSet = new ShowResultSet(showClustersStmt.getMetaData(), rows);
}
/**
 * Applies the statement's simple {@code <column> = '<value>'} predicate to the
 * rows. With no predicate the rows pass through unchanged; a predicate whose
 * operator is not an equivalence yields an empty result.
 *
 * @param showStmt              statement carrying the optional predicate
 * @param showResultSetMetaData metadata used to resolve the column index
 * @param rows                  candidate rows
 * @return the rows matching the predicate (or all rows when none is set)
 */
private List<List<String>> doPredicate(ShowStmt showStmt,
                                       ShowResultSetMetaData showResultSetMetaData,
                                       List<List<String>> rows) {
    Predicate predicate = showStmt.getPredicate();
    if (predicate == null) {
        return rows;
    }
    BinaryPredicate binaryPredicate = (BinaryPredicate) predicate;
    SlotRef slotRef = (SlotRef) binaryPredicate.getChild(0);
    StringLiteral stringLiteral = (StringLiteral) binaryPredicate.getChild(1);
    int columnIndex = showResultSetMetaData.getColumnIdx(slotRef.getColumnName());
    List<List<String>> matched = new ArrayList<>();
    if (binaryPredicate.getOp().isEquivalence()) {
        String wanted = stringLiteral.getStringValue();
        for (List<String> row : rows) {
            if (row.get(columnIndex).equals(wanted)) {
                matched.add(row);
            }
        }
    }
    return matched;
}
/**
 * SHOW CREATE CATALOG: reconstructs the CREATE EXTERNAL CATALOG statement for
 * the named catalog. The internal catalog yields an empty result because it is
 * not created by a statement.
 */
private void handleShowCreateExternalCatalog() {
    ShowCreateExternalCatalogStmt showStmt = (ShowCreateExternalCatalogStmt) stmt;
    String catalogName = showStmt.getCatalogName();
    List<List<String>> rows = Lists.newArrayList();
    if (InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equalsIgnoreCase(catalogName)) {
        resultSet = new ShowResultSet(stmt.getMetaData(), rows);
        return;
    }
    Catalog catalog = connectContext.getGlobalStateMgr().getCatalogMgr().getCatalogByName(catalogName);
    if (catalog == null) {
        // BUGFIX: an unknown catalog previously fell through to a
        // NullPointerException at catalog.getComment(); report it explicitly.
        throw new SemanticException("catalog not exists: " + catalogName);
    }
    StringBuilder createCatalogSql = new StringBuilder();
    createCatalogSql.append("CREATE EXTERNAL CATALOG ")
            .append("`").append(catalogName).append("`")
            .append("\n");
    String comment = catalog.getComment();
    if (comment != null) {
        createCatalogSql.append("comment \"").append(comment).append("\"\n");
    }
    createCatalogSql.append("PROPERTIES (")
            .append(new PrintableMap<>(catalog.getConfig(), " = ", true, true))
            .append("\n)");
    rows.add(Lists.newArrayList(catalogName, createCatalogSql.toString()));
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
} | class ShowExecutor {
// Class logger (not referenced in the visible code).
private static final Logger LOG = LogManager.getLogger(ShowExecutor.class);
// Shared empty row list used for "no rows" answers; treated as immutable by convention.
private static final List<List<String>> EMPTY_SET = Lists.newArrayList();
// Session that issued the SHOW statement; used for privilege checks and current-user info.
private final ConnectContext connectContext;
// The SHOW statement being executed; execute() dispatches on its concrete type.
private final ShowStmt stmt;
// Result filled in by the handleXxx() methods and returned by execute().
private ShowResultSet resultSet;
// Metadata manager snapshot taken at construction time.
private final MetadataMgr metadataMgr;
/**
 * Creates an executor bound to one session and one SHOW statement.
 *
 * @param connectContext session issuing the statement
 * @param stmt           the SHOW statement to execute
 */
public ShowExecutor(ConnectContext connectContext, ShowStmt stmt) {
    this.connectContext = connectContext;
    this.stmt = stmt;
    resultSet = null;
    metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();
}
/**
 * Dispatches on the concrete ShowStmt type to the matching handler (each
 * handler fills {@link #resultSet}), then applies the statement's optional
 * equality predicate via doPredicate and returns the final result set.
 *
 * @return the (possibly filtered) result set for the SHOW statement
 * @throws AnalysisException if a handler rejects the statement
 * @throws DdlException      if a handler fails a DDL-level lookup
 */
public ShowResultSet execute() throws AnalysisException, DdlException {
    if (stmt instanceof ShowMaterializedViewStmt) {
        handleShowMaterializedView();
    } else if (stmt instanceof ShowAuthorStmt) {
        handleShowAuthor();
    } else if (stmt instanceof ShowProcStmt) {
        handleShowProc();
    } else if (stmt instanceof HelpStmt) {
        handleHelp();
    } else if (stmt instanceof ShowWarehousesStmt) {
        handleShowWarehouses();
    } else if (stmt instanceof ShowClustersStmt) {
        handleShowClusters();
    } else if (stmt instanceof ShowDbStmt) {
        handleShowDb();
    } else if (stmt instanceof ShowTableStmt) {
        handleShowTable();
    } else if (stmt instanceof ShowTableStatusStmt) {
        handleShowTableStatus();
    } else if (stmt instanceof DescribeStmt) {
        handleDescribe();
    } else if (stmt instanceof ShowCreateTableStmt) {
        handleShowCreateTable();
    } else if (stmt instanceof ShowCreateDbStmt) {
        handleShowCreateDb();
    } else if (stmt instanceof ShowProcesslistStmt) {
        handleShowProcesslist();
    } else if (stmt instanceof ShowEnginesStmt) {
        handleShowEngines();
    } else if (stmt instanceof ShowFunctionsStmt) {
        handleShowFunctions();
    } else if (stmt instanceof ShowVariablesStmt) {
        handleShowVariables();
    } else if (stmt instanceof ShowColumnStmt) {
        handleShowColumn();
    } else if (stmt instanceof ShowLoadStmt) {
        handleShowLoad();
    } else if (stmt instanceof ShowRoutineLoadStmt) {
        handleShowRoutineLoad();
    } else if (stmt instanceof ShowRoutineLoadTaskStmt) {
        handleShowRoutineLoadTask();
    } else if (stmt instanceof ShowStreamLoadStmt) {
        handleShowStreamLoad();
    } else if (stmt instanceof ShowDeleteStmt) {
        handleShowDelete();
    } else if (stmt instanceof ShowAlterStmt) {
        handleShowAlter();
    } else if (stmt instanceof ShowUserPropertyStmt) {
        handleShowUserProperty();
    } else if (stmt instanceof ShowDataStmt) {
        handleShowData();
    } else if (stmt instanceof ShowCollationStmt) {
        handleShowCollation();
    } else if (stmt instanceof ShowPartitionsStmt) {
        handleShowPartitions();
    } else if (stmt instanceof ShowTabletStmt) {
        handleShowTablet();
    } else if (stmt instanceof ShowBackupStmt) {
        handleShowBackup();
    } else if (stmt instanceof ShowRestoreStmt) {
        handleShowRestore();
    } else if (stmt instanceof ShowBrokerStmt) {
        handleShowBroker();
    } else if (stmt instanceof ShowResourcesStmt) {
        handleShowResources();
    } else if (stmt instanceof ShowExportStmt) {
        handleShowExport();
    } else if (stmt instanceof ShowBackendsStmt) {
        handleShowBackends();
    } else if (stmt instanceof ShowFrontendsStmt) {
        handleShowFrontends();
    } else if (stmt instanceof ShowRepositoriesStmt) {
        handleShowRepositories();
    } else if (stmt instanceof ShowSnapshotStmt) {
        handleShowSnapshot();
    } else if (stmt instanceof ShowGrantsStmt) {
        handleShowGrants();
    } else if (stmt instanceof ShowRolesStmt) {
        handleShowRoles();
    } else if (stmt instanceof AdminShowReplicaStatusStmt) {
        handleAdminShowTabletStatus();
    } else if (stmt instanceof AdminShowReplicaDistributionStmt) {
        handleAdminShowTabletDistribution();
    } else if (stmt instanceof AdminShowConfigStmt) {
        handleAdminShowConfig();
    } else if (stmt instanceof ShowSmallFilesStmt) {
        handleShowSmallFiles();
    } else if (stmt instanceof ShowDynamicPartitionStmt) {
        handleShowDynamicPartition();
    } else if (stmt instanceof ShowIndexStmt) {
        handleShowIndex();
    } else if (stmt instanceof ShowTransactionStmt) {
        handleShowTransaction();
    } else if (stmt instanceof ShowPluginsStmt) {
        handleShowPlugins();
    } else if (stmt instanceof ShowSqlBlackListStmt) {
        handleShowSqlBlackListStmt();
    } else if (stmt instanceof ShowAnalyzeJobStmt) {
        handleShowAnalyzeJob();
    } else if (stmt instanceof ShowAnalyzeStatusStmt) {
        handleShowAnalyzeStatus();
    } else if (stmt instanceof ShowBasicStatsMetaStmt) {
        handleShowBasicStatsMeta();
    } else if (stmt instanceof ShowHistogramStatsMetaStmt) {
        handleShowHistogramStatsMeta();
    } else if (stmt instanceof ShowResourceGroupStmt) {
        handleShowResourceGroup();
    } else if (stmt instanceof ShowUserStmt) {
        handleShowUser();
    } else if (stmt instanceof ShowCatalogsStmt) {
        handleShowCatalogs();
    } else if (stmt instanceof ShowComputeNodesStmt) {
        handleShowComputeNodes();
    } else if (stmt instanceof ShowAuthenticationStmt) {
        handleShowAuthentication();
    } else if (stmt instanceof ShowCreateExternalCatalogStmt) {
        handleShowCreateExternalCatalog();
    } else {
        // Unrecognized SHOW statement: answer with an empty result set.
        handleEmtpy();
    }
    // Apply the optional WHERE <col> = '<value>' filter before returning.
    List<List<String>> rows = doPredicate(stmt, stmt.getMetaData(), resultSet.getResultRows());
    return new ShowResultSet(resultSet.getMetaData(), rows);
}
/**
 * SHOW [ALL] AUTHENTICATION: one row per user with password presence, auth
 * plugin, and the plugin's textual detail. Uses the new authentication manager
 * when the new privilege framework is active, otherwise the legacy Auth module.
 */
private void handleShowAuthentication() {
    final ShowAuthenticationStmt showAuthenticationStmt = (ShowAuthenticationStmt) stmt;
    if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
        AuthenticationManager authenticationManager = GlobalStateMgr.getCurrentState().getAuthenticationManager();
        List<List<String>> userAuthInfos = Lists.newArrayList();
        Map<UserIdentity, UserAuthenticationInfo> authenticationInfoMap = new HashMap<>();
        if (showAuthenticationStmt.isAll()) {
            // ALL: snapshot every known user's authentication info.
            authenticationInfoMap.putAll(authenticationManager.getUserToAuthenticationInfo());
        } else {
            UserAuthenticationInfo userAuthenticationInfo;
            if (showAuthenticationStmt.getUserIdent() == null) {
                // No explicit user: fall back to the session's current user.
                userAuthenticationInfo = authenticationManager
                        .getUserAuthenticationInfoByUserIdentity(connectContext.getCurrentUserIdentity());
            } else {
                userAuthenticationInfo =
                        authenticationManager.getUserAuthenticationInfoByUserIdentity(showAuthenticationStmt.getUserIdent());
            }
            // NOTE(review): when getUserIdent() is null the map key is null here,
            // so entry.getKey().toString() below would NPE — confirm callers always
            // set the user identity on this path.
            authenticationInfoMap.put(showAuthenticationStmt.getUserIdent(), userAuthenticationInfo);
        }
        for (Map.Entry<UserIdentity, UserAuthenticationInfo> entry : authenticationInfoMap.entrySet()) {
            UserAuthenticationInfo userAuthenticationInfo = entry.getValue();
            userAuthInfos.add(Lists.newArrayList(
                    entry.getKey().toString(),
                    // Only password presence is exposed, never the password itself.
                    userAuthenticationInfo.getPassword().length == 0 ? "No" : "Yes",
                    userAuthenticationInfo.getAuthPlugin(),
                    userAuthenticationInfo.getTextForAuthPlugin()));
        }
        resultSet = new ShowResultSet(showAuthenticationStmt.getMetaData(), userAuthInfos);
    } else {
        // Legacy privilege framework renders the rows itself.
        List<List<String>> rows;
        if (showAuthenticationStmt.isAll()) {
            rows = GlobalStateMgr.getCurrentState().getAuth()
                    .getAuthenticationInfo(showAuthenticationStmt.getUserIdent());
        } else {
            if (showAuthenticationStmt.getUserIdent() == null) {
                rows = GlobalStateMgr.getCurrentState().getAuth()
                        .getAuthenticationInfo(connectContext.getCurrentUserIdentity());
            } else {
                rows = GlobalStateMgr.getCurrentState().getAuth()
                        .getAuthenticationInfo(showAuthenticationStmt.getUserIdent());
            }
        }
        resultSet = new ShowResultSet(showAuthenticationStmt.getMetaData(), rows);
    }
}
// SHOW COMPUTE NODES: render the cluster's compute nodes from the proc dir.
private void handleShowComputeNodes() {
    ShowComputeNodesStmt showComputeNodesStmt = (ShowComputeNodesStmt) stmt;
    resultSet = new ShowResultSet(showComputeNodesStmt.getMetaData(),
            ComputeNodeProcDir.getClusterComputeNodesInfos());
}
/**
 * SHOW MATERIALIZED VIEW: rows for (a) async materialized views of the
 * database the user may see, and (b) synchronous rollup-style MVs attached to
 * OLAP tables. Each row: id, name, database, DDL/definition text, row count.
 * The whole scan runs under the database read lock.
 *
 * @throws AnalysisException if the database does not exist
 */
private void handleShowMaterializedView() throws AnalysisException {
    ShowMaterializedViewStmt showMaterializedViewStmt = (ShowMaterializedViewStmt) stmt;
    String dbName = showMaterializedViewStmt.getDb();
    List<List<String>> rowSets = Lists.newArrayList();
    Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
    MetaUtils.checkDbNullAndReport(db, dbName);
    db.readLock();
    try {
        PatternMatcher matcher = null;
        if (showMaterializedViewStmt.getPattern() != null) {
            matcher = PatternMatcher.createMysqlPattern(showMaterializedViewStmt.getPattern(),
                    CaseSensibility.TABLE.getCaseSensibility());
        }
        // Async materialized views registered on the database.
        for (MaterializedView mvTable : db.getMaterializedViews()) {
            if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
                // Require SELECT on every local base table of the MV…
                AtomicBoolean baseTableHasPrivilege = new AtomicBoolean(true);
                mvTable.getBaseTableInfos().forEach(baseTableInfo -> {
                    Table baseTable = baseTableInfo.getTable();
                    if (baseTable != null && baseTable.isLocalTable() && !PrivilegeManager.
                            checkTableAction(connectContext, baseTableInfo.getDbName(),
                                    baseTableInfo.getTableName(),
                                    PrivilegeType.SELECT)) {
                        baseTableHasPrivilege.set(false);
                    }
                });
                if (!baseTableHasPrivilege.get()) {
                    continue;
                }
                // …and any action on the MV itself.
                if (!PrivilegeManager.checkAnyActionOnMaterializedView(connectContext, db.getFullName(),
                        mvTable.getName())) {
                    continue;
                }
            }
            if (matcher != null && !matcher.match(mvTable.getName())) {
                continue;
            }
            List<String> resultRow = Lists.newArrayList(String.valueOf(mvTable.getId()), mvTable.getName(), dbName,
                    mvTable.getMaterializedViewDdlStmt(true), String.valueOf(mvTable.getRowCount()));
            rowSets.add(resultRow);
        }
        // Synchronous (rollup) materialized views on OLAP tables.
        for (Table table : db.getTables()) {
            if (table.getType() == Table.TableType.OLAP) {
                OlapTable olapTable = (OlapTable) table;
                List<MaterializedIndex> visibleMaterializedViews = olapTable.getVisibleIndex();
                long baseIdx = olapTable.getBaseIndexId();
                for (MaterializedIndex mvIdx : visibleMaterializedViews) {
                    // Skip the base index itself; it is not a materialized view.
                    if (baseIdx == mvIdx.getId()) {
                        continue;
                    }
                    if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvIdx.getId()))) {
                        continue;
                    }
                    ArrayList<String> resultRow = new ArrayList<>();
                    MaterializedIndexMeta mvMeta = olapTable.getVisibleIndexIdToMeta().get(mvIdx.getId());
                    resultRow.add(String.valueOf(mvIdx.getId()));
                    resultRow.add(olapTable.getIndexNameById(mvIdx.getId()));
                    resultRow.add(dbName);
                    if (mvMeta.getOriginStmt() == null) {
                        // No recorded statement (e.g. old metadata): synthesize a
                        // CREATE MATERIALIZED VIEW text from the index schema —
                        // key columns become the group-by list, non-key columns
                        // become aggregate expressions.
                        StringBuilder originStmtBuilder = new StringBuilder(
                                "create materialized view " + olapTable.getIndexNameById(mvIdx.getId()) +
                                        " as select ");
                        String groupByString = "";
                        for (Column column : mvMeta.getSchema()) {
                            if (column.isKey()) {
                                groupByString += column.getName() + ",";
                            }
                        }
                        originStmtBuilder.append(groupByString);
                        for (Column column : mvMeta.getSchema()) {
                            if (!column.isKey()) {
                                originStmtBuilder.append(column.getAggregationType().toString()).append("(")
                                        .append(column.getName()).append(")").append(",");
                            }
                        }
                        // Trim the trailing comma left by the select list…
                        originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
                        originStmtBuilder.append(" from ").append(olapTable.getName()).append(" group by ")
                                .append(groupByString);
                        // …and the trailing comma left by the group-by list.
                        originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
                        resultRow.add(originStmtBuilder.toString());
                    } else {
                        // Normalize recorded statement to a single line.
                        resultRow.add(mvMeta.getOriginStmt().replace("\n", "").replace("\t", "")
                                .replaceAll("[ ]+", " "));
                    }
                    resultRow.add(String.valueOf(mvIdx.getRowCount()));
                    rowSets.add(resultRow);
                }
            }
        }
    } finally {
        db.readUnlock();
    }
    resultSet = new ShowResultSet(stmt.getMetaData(), rowSets);
}
// SHOW PROCESSLIST: one row per connection visible to the current user;
// FULL controls how much of each query text is shown.
private void handleShowProcesslist() {
    ShowProcesslistStmt showProcesslistStmt = (ShowProcesslistStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    long nowMs = System.currentTimeMillis();
    for (ConnectContext.ThreadInfo threadInfo : connectContext.getConnectScheduler()
            .listConnection(connectContext.getQualifiedUser())) {
        List<String> row = threadInfo.toRow(nowMs, showProcesslistStmt.showFull());
        if (row != null) {
            rows.add(row);
        }
    }
    resultSet = new ShowResultSet(showProcesslistStmt.getMetaData(), rows);
}
private void handleEmtpy() {
resultSet = new ShowResultSet(stmt.getMetaData(), EMPTY_SET);
}
private void handleShowAuthor() {
ShowAuthorStmt showAuthorStmt = (ShowAuthorStmt) stmt;
List<List<String>> rowSet = Lists.newArrayList();
resultSet = new ShowResultSet(showAuthorStmt.getMetaData(), rowSet);
}
private void handleShowEngines() {
ShowEnginesStmt showStmt = (ShowEnginesStmt) stmt;
List<List<String>> rowSet = Lists.newArrayList();
rowSet.add(Lists.newArrayList("OLAP", "YES", "Default storage engine of StarRocks", "NO", "NO", "NO"));
rowSet.add(Lists.newArrayList("MySQL", "YES", "MySQL server which data is in it", "NO", "NO", "NO"));
rowSet.add(Lists.newArrayList("ELASTICSEARCH", "YES", "ELASTICSEARCH cluster which data is in it", "NO", "NO",
"NO"));
rowSet.add(Lists.newArrayList("HIVE", "YES", "HIVE database which data is in it", "NO", "NO", "NO"));
rowSet.add(Lists.newArrayList("ICEBERG", "YES", "ICEBERG data lake which data is in it", "NO", "NO", "NO"));
resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
}
private void handleShowFunctions() throws AnalysisException {
ShowFunctionsStmt showStmt = (ShowFunctionsStmt) stmt;
List<Function> functions = null;
if (showStmt.getIsBuiltin()) {
functions = connectContext.getGlobalStateMgr().getBuiltinFunctions();
} else if (showStmt.getIsGlobal()) {
functions = connectContext.getGlobalStateMgr().getGlobalFunctionMgr().getFunctions();
} else {
Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
functions = db.getFunctions();
}
List<List<Comparable>> rowSet = Lists.newArrayList();
for (Function function : functions) {
List<Comparable> row = function.getInfo(showStmt.getIsVerbose());
if (showStmt.getWild() == null || showStmt.like(function.functionName())) {
rowSet.add(row);
}
}
ListComparator<List<Comparable>> comparator = null;
OrderByPair orderByPair = new OrderByPair(0, false);
comparator = new ListComparator<>(orderByPair);
Collections.sort(rowSet, comparator);
List<List<String>> resultRowSet = Lists.newArrayList();
Set<String> functionNameSet = new HashSet<>();
for (List<Comparable> row : rowSet) {
List<String> resultRow = Lists.newArrayList();
if (functionNameSet.contains(row.get(0).toString())) {
continue;
}
for (Comparable column : row) {
resultRow.add(column.toString());
}
resultRowSet.add(resultRow);
functionNameSet.add(resultRow.get(0));
}
ShowResultSetMetaData showMetaData = showStmt.getIsVerbose() ? showStmt.getMetaData() :
ShowResultSetMetaData.builder()
.addColumn(new Column("Function Name", ScalarType.createVarchar(256))).build();
resultSet = new ShowResultSet(showMetaData, resultRowSet);
}
private void handleShowProc() throws AnalysisException {
ShowProcStmt showProcStmt = (ShowProcStmt) stmt;
ShowResultSetMetaData metaData = showProcStmt.getMetaData();
ProcNodeInterface procNode = showProcStmt.getNode();
List<List<String>> finalRows = procNode.fetchResult().getRows();
resultSet = new ShowResultSet(metaData, finalRows);
}
private void handleShowDb() throws AnalysisException, DdlException {
ShowDbStmt showDbStmt = (ShowDbStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
List<String> dbNames = new ArrayList<>();
String catalogName;
if (showDbStmt.getCatalogName() == null) {
catalogName = connectContext.getCurrentCatalog();
} else {
catalogName = showDbStmt.getCatalogName();
}
dbNames = metadataMgr.listDbNames(catalogName);
PatternMatcher matcher = null;
if (showDbStmt.getPattern() != null) {
matcher = PatternMatcher.createMysqlPattern(showDbStmt.getPattern(),
CaseSensibility.DATABASE.getCaseSensibility());
}
Set<String> dbNameSet = Sets.newTreeSet();
for (String dbName : dbNames) {
if (matcher != null && !matcher.match(dbName)) {
continue;
}
if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
if (CatalogMgr.isInternalCatalog(catalogName) &&
!PrivilegeManager.checkAnyActionOnOrInDb(connectContext, dbName)) {
continue;
}
} else {
if (!PrivilegeChecker.checkDbPriv(connectContext, catalogName, dbName, PrivPredicate.SHOW)) {
continue;
}
}
dbNameSet.add(dbName);
}
for (String dbName : dbNameSet) {
rows.add(Lists.newArrayList(dbName));
}
resultSet = new ShowResultSet(showDbStmt.getMetaData(), rows);
}
    // Handle "SHOW [FULL] TABLES": list the tables of a database, filtered by
    // the optional LIKE pattern and (for the internal catalog) by the caller's
    // table-level privileges.
    private void handleShowTable() throws AnalysisException {
        ShowTableStmt showTableStmt = (ShowTableStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        String catalogName = showTableStmt.getCatalogName();
        if (catalogName == null) {
            catalogName = connectContext.getCurrentCatalog();
        }
        String dbName = showTableStmt.getDb();
        Database db = metadataMgr.getDb(catalogName, dbName);
        PatternMatcher matcher = null;
        if (showTableStmt.getPattern() != null) {
            matcher = PatternMatcher.createMysqlPattern(showTableStmt.getPattern(),
                    CaseSensibility.TABLE.getCaseSensibility());
        }
        // TreeMap keeps the output sorted by table name; the value is the
        // MySQL-style table type shown in verbose mode.
        Map<String, String> tableMap = Maps.newTreeMap();
        MetaUtils.checkDbNullAndReport(db, showTableStmt.getDb());
        if (CatalogMgr.isInternalCatalog(catalogName)) {
            // Internal catalog: enumerate under the db read lock and skip
            // tables the current user cannot see.
            db.readLock();
            try {
                for (Table tbl : db.getTables()) {
                    if (matcher != null && !matcher.match(tbl.getName())) {
                        continue;
                    }
                    if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
                        if (!PrivilegeManager.checkAnyActionOnTable(connectContext, db.getFullName(), tbl.getName())) {
                            continue;
                        }
                    } else {
                        if (!PrivilegeChecker.checkTblPriv(ConnectContext.get(), catalogName,
                                db.getFullName(), tbl.getName(), PrivPredicate.SHOW)) {
                            continue;
                        }
                    }
                    tableMap.put(tbl.getName(), tbl.getMysqlType());
                }
            } finally {
                db.readUnlock();
            }
        } else {
            // External catalog: names come from the connector; no per-table
            // privilege filtering is applied here.
            List<String> tableNames = metadataMgr.listTableNames(catalogName, dbName);
            if (matcher != null) {
                tableNames = tableNames.stream().filter(matcher::match).collect(Collectors.toList());
            }
            tableNames.forEach(name -> tableMap.put(name, "BASE TABLE"));
        }
        for (Map.Entry<String, String> entry : tableMap.entrySet()) {
            // Verbose output ("SHOW FULL TABLES") adds the table type column.
            if (showTableStmt.isVerbose()) {
                rows.add(Lists.newArrayList(entry.getKey(), entry.getValue()));
            } else {
                rows.add(Lists.newArrayList(entry.getKey()));
            }
        }
        resultSet = new ShowResultSet(showTableStmt.getMetaData(), rows);
    }
    // Handle "SHOW TABLE STATUS": one row per visible table. Only name, engine
    // and comment are populated; the remaining MySQL status columns are NULL.
    private void handleShowTableStatus() throws AnalysisException {
        ShowTableStatusStmt showStmt = (ShowTableStatusStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
        // A missing database yields an empty result rather than an error.
        if (db != null) {
            db.readLock();
            try {
                PatternMatcher matcher = null;
                if (showStmt.getPattern() != null) {
                    matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                            CaseSensibility.TABLE.getCaseSensibility());
                }
                for (Table table : db.getTables()) {
                    // Skip tables not matching the LIKE pattern.
                    if (matcher != null && !matcher.match(table.getName())) {
                        continue;
                    }
                    // Skip tables the current user may not see.
                    if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
                        if (!PrivilegeManager.checkAnyActionOnTable(connectContext, db.getFullName(), table.getName())) {
                            continue;
                        }
                    } else if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
                            db.getFullName(), table.getName(),
                            PrivPredicate.SHOW)) {
                        continue;
                    }
                    List<String> row = Lists.newArrayList();
                    // Name
                    row.add(table.getName());
                    // Engine
                    row.add(table.getEngine());
                    // 15 unsupported status columns emitted as NULL.
                    for (int i = 0; i < 15; ++i) {
                        row.add(null);
                    }
                    // Comment
                    row.add(table.getComment());
                    rows.add(row);
                }
            } finally {
                db.readUnlock();
            }
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
private void handleShowVariables() throws AnalysisException {
ShowVariablesStmt showStmt = (ShowVariablesStmt) stmt;
PatternMatcher matcher = null;
if (showStmt.getPattern() != null) {
matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
CaseSensibility.VARIABLES.getCaseSensibility());
}
List<List<String>> rows = VariableMgr.dump(showStmt.getType(), connectContext.getSessionVariable(), matcher);
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
private void handleShowCreateDb() throws AnalysisException {
ShowCreateDbStmt showStmt = (ShowCreateDbStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
rows.add(Lists.newArrayList(showStmt.getDb(),
"CREATE DATABASE `" + showStmt.getDb() + "`"));
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
private void handleShowCreateTable() throws AnalysisException {
ShowCreateTableStmt showStmt = (ShowCreateTableStmt) stmt;
TableName tbl = showStmt.getTbl();
String catalogName = tbl.getCatalog();
if (catalogName == null) {
catalogName = connectContext.getCurrentCatalog();
}
if (CatalogMgr.isInternalCatalog(catalogName)) {
showCreateInternalCatalogTable(showStmt);
} else {
showCreateExternalCatalogTable(tbl, catalogName);
}
}
    // Build a best-effort CREATE TABLE statement for a table in an external
    // catalog (Hive / Hudi / Iceberg / Delta Lake / JDBC).
    private void showCreateExternalCatalogTable(TableName tbl, String catalogName) {
        String dbName = tbl.getDb();
        String tableName = tbl.getTbl();
        MetadataMgr metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();
        Database db = metadataMgr.getDb(catalogName, dbName);
        if (db == null) {
            ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);
        }
        Table table = metadataMgr.getTable(catalogName, dbName, tableName);
        if (table == null) {
            ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);
        }
        // Column list rendered with MySQL-compatible types (see toMysqlDDL).
        StringBuilder createTableSql = new StringBuilder();
        createTableSql.append("CREATE TABLE ")
                .append("`").append(tableName).append("`")
                .append(" (\n");
        List<String> columns = table.getFullSchema().stream().map(
                this::toMysqlDDL).collect(Collectors.toList());
        createTableSql.append(String.join(",\n", columns))
                .append("\n)");
        // Partition clause; JDBC tables get no partition DDL of this form.
        if (table.getType() != JDBC && !table.isUnPartitioned()) {
            createTableSql.append("\nWITH (\n partitioned_by = ARRAY [ ");
            createTableSql.append(String.join(", ", table.getPartitionColumnNames())).append(" ]\n)");
        }
        // Append the storage location when the connector exposes one.
        String location = null;
        if (table.isHiveTable() || table.isHudiTable()) {
            location = ((HiveMetaStoreTable) table).getTableLocation();
        } else if (table.isIcebergTable()) {
            location = ((IcebergTable) table).getTableLocation();
        } else if (table.isDeltalakeTable()) {
            location = ((DeltaLakeTable) table).getTableLocation();
        }
        if (!Strings.isNullOrEmpty(location)) {
            createTableSql.append("\nLOCATION ").append("'").append(location).append("'");
        }
        List<List<String>> rows = Lists.newArrayList();
        rows.add(Lists.newArrayList(tableName, createTableSql.toString()));
        resultSet = new ShowResultSet(stmt.getMetaData(), rows);
    }
private String toMysqlDDL(Column column) {
StringBuilder sb = new StringBuilder();
sb.append(" `").append(column.getName()).append("` ");
switch (column.getType().getPrimitiveType()) {
case TINYINT:
sb.append("tinyint(4)");
break;
case SMALLINT:
sb.append("smallint(6)");
break;
case INT:
sb.append("int(11)");
break;
case BIGINT:
sb.append("bigint(20)");
break;
case FLOAT:
sb.append("float");
break;
case DOUBLE:
sb.append("double");
case DECIMAL32:
case DECIMAL64:
case DECIMAL128:
case DECIMALV2:
sb.append("decimal");
break;
case DATE:
case DATETIME:
sb.append("datetime");
break;
case CHAR:
case VARCHAR:
sb.append("varchar(1048576)");
break;
default:
sb.append("binary(1048576)");
}
sb.append(" DEFAULT NULL");
return sb.toString();
}
private void showCreateInternalCatalogTable(ShowCreateTableStmt showStmt) throws AnalysisException {
Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
List<List<String>> rows = Lists.newArrayList();
db.readLock();
try {
Table table = db.getTable(showStmt.getTable());
if (table == null) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());
}
List<String> createTableStmt = Lists.newArrayList();
GlobalStateMgr.getDdlStmt(table, createTableStmt, null, null, false, true /* hide password */);
if (createTableStmt.isEmpty()) {
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
return;
}
if (table instanceof View) {
if (showStmt.getType() == ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
showStmt.getTable(), "MATERIALIZED VIEW");
}
View view = (View) table;
StringBuilder sb = new StringBuilder();
sb.append("CREATE VIEW `").append(table.getName()).append("` AS ").append(view.getInlineViewDef());
rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0), "utf8", "utf8_general_ci"));
resultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);
} else if (table instanceof MaterializedView) {
if (showStmt.getType() == ShowCreateTableStmt.CreateTableType.VIEW) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
showStmt.getTable(), "VIEW");
}
rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));
resultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);
} else {
if (showStmt.getType() != ShowCreateTableStmt.CreateTableType.TABLE) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
showStmt.getTable(), showStmt.getType().getValue());
}
rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
} finally {
db.readUnlock();
}
}
private void handleDescribe() throws AnalysisException {
DescribeStmt describeStmt = (DescribeStmt) stmt;
resultSet = new ShowResultSet(describeStmt.getMetaData(), describeStmt.getResultRows());
}
    // Handle "SHOW [FULL] COLUMNS": describe the base schema of a table,
    // optionally filtered by a LIKE pattern on the column name.
    private void handleShowColumn() throws AnalysisException {
        ShowColumnStmt showStmt = (ShowColumnStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
        db.readLock();
        try {
            Table table = db.getTable(showStmt.getTable());
            if (table == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,
                        showStmt.getDb() + "." + showStmt.getTable());
            }
            PatternMatcher matcher = null;
            if (showStmt.getPattern() != null) {
                matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                        CaseSensibility.COLUMN.getCaseSensibility());
            }
            List<Column> columns = table.getBaseSchema();
            for (Column col : columns) {
                if (matcher != null && !matcher.match(col.getName())) {
                    continue;
                }
                final String columnName = col.getName();
                final String columnType = col.getType().canonicalName().toLowerCase();
                final String isAllowNull = col.isAllowNull() ? "YES" : "NO";
                final String isKey = col.isKey() ? "YES" : "NO";
                final String defaultValue = col.getMetaDefaultValue(Lists.newArrayList());
                // Implicit aggregation types are hidden from the user.
                final String aggType = col.getAggregationType() == null
                        || col.isAggregationTypeImplicit() ? "" : col.getAggregationType().toSql();
                if (showStmt.isVerbose()) {
                    // NOTE(review): the two empty strings presumably fill the
                    // collation and privileges columns of SHOW FULL COLUMNS —
                    // confirm against the statement's metadata.
                    rows.add(Lists.newArrayList(columnName,
                            columnType,
                            "",
                            isAllowNull,
                            isKey,
                            defaultValue,
                            aggType,
                            "",
                            col.getComment()));
                } else {
                    rows.add(Lists.newArrayList(columnName,
                            columnType,
                            isAllowNull,
                            isKey,
                            defaultValue,
                            aggType));
                }
            }
        } finally {
            db.readUnlock();
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
private void handleShowIndex() throws AnalysisException {
ShowIndexStmt showStmt = (ShowIndexStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
db.readLock();
try {
Table table = db.getTable(showStmt.getTableName().getTbl());
if (table == null) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,
db.getOriginName() + "." + showStmt.getTableName().toString());
} else if (table instanceof OlapTable) {
List<Index> indexes = ((OlapTable) table).getIndexes();
for (Index index : indexes) {
rows.add(Lists.newArrayList(showStmt.getTableName().toString(), "", index.getIndexName(),
"", String.join(",", index.getColumns()), "", "", "", "",
"", index.getIndexType().name(), index.getComment()));
}
} else {
}
} finally {
db.readUnlock();
}
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
    // Handle "HELP <mark>": resolve the mark to a single topic, a keyword
    // match listing, or a category listing, mirroring MySQL HELP behavior.
    private void handleHelp() {
        HelpStmt helpStmt = (HelpStmt) stmt;
        String mark = helpStmt.getMask();
        HelpModule module = HelpModule.getInstance();
        // First try an exact topic-name lookup.
        HelpTopic topic = module.getTopic(mark);
        // Not an exact topic: try matching by keyword.
        if (topic == null) {
            List<String> topics = module.listTopicByKeyword(mark);
            if (topics.size() == 0) {
                topic = null;
            } else if (topics.size() == 1) {
                // Single keyword match resolves to that topic.
                topic = module.getTopic(topics.get(0));
            } else {
                // Multiple keyword matches: list topic names ("N") plus any
                // categories with the same name ("Y"), then return early.
                List<List<String>> rows = Lists.newArrayList();
                for (String str : topics) {
                    rows.add(Lists.newArrayList(str, "N"));
                }
                List<String> categories = module.listCategoryByName(mark);
                for (String str : categories) {
                    rows.add(Lists.newArrayList(str, "Y"));
                }
                resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);
                return;
            }
        }
        if (topic != null) {
            // Single topic resolved: return name / description / example.
            resultSet = new ShowResultSet(helpStmt.getMetaData(), Lists.<List<String>>newArrayList(
                    Lists.newArrayList(topic.getName(), topic.getDescription(), topic.getExample())));
        } else {
            // Fall back to a category-name lookup.
            List<String> categories = module.listCategoryByName(mark);
            if (categories.isEmpty()) {
                // Nothing found at all: empty result.
                resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), EMPTY_SET);
            } else if (categories.size() > 1) {
                // Several categories match: list their names.
                resultSet = new ShowResultSet(helpStmt.getCategoryMetaData(),
                        Lists.<List<String>>newArrayList(categories));
            } else {
                // Exactly one category: list its topics ("N") and
                // sub-categories ("Y").
                List<List<String>> rows = Lists.newArrayList();
                List<String> topics = module.listTopicByCategory(categories.get(0));
                for (String str : topics) {
                    rows.add(Lists.newArrayList(str, "N"));
                }
                List<String> subCategories = module.listCategoryByCategory(categories.get(0));
                for (String str : subCategories) {
                    rows.add(Lists.newArrayList(str, "Y"));
                }
                resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);
            }
        }
    }
    // Handle "SHOW LOAD": list load jobs of a database with optional label and
    // state filters, ORDER BY, and LIMIT/OFFSET applied in memory.
    private void handleShowLoad() throws AnalysisException {
        ShowLoadStmt showStmt = (ShowLoadStmt) stmt;
        GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
        Database db = globalStateMgr.getDb(showStmt.getDbName());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
        long dbId = db.getId();
        // Requested job states, or null meaning "all states".
        Set<String> statesValue = showStmt.getStates() == null ? null : showStmt.getStates().stream()
                .map(entity -> entity.name())
                .collect(Collectors.toSet());
        List<List<Comparable>> loadInfos =
                globalStateMgr.getLoadManager().getLoadJobInfosByDb(dbId, showStmt.getLabelValue(),
                        showStmt.isAccurateMatch(),
                        statesValue);
        // Sort by the requested ORDER BY columns, or by column 0 by default.
        List<OrderByPair> orderByPairs = showStmt.getOrderByPairs();
        ListComparator<List<Comparable>> comparator = null;
        if (orderByPairs != null) {
            OrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];
            comparator = new ListComparator<List<Comparable>>(orderByPairs.toArray(orderByPairArr));
        } else {
            comparator = new ListComparator<List<Comparable>>(0);
        }
        Collections.sort(loadInfos, comparator);
        // Stringify every cell for the MySQL result set.
        List<List<String>> rows = Lists.newArrayList();
        for (List<Comparable> loadInfo : loadInfos) {
            List<String> oneInfo = new ArrayList<String>(loadInfo.size());
            for (Comparable element : loadInfo) {
                oneInfo.add(element.toString());
            }
            rows.add(oneInfo);
        }
        // Apply LIMIT/OFFSET; -1 means the clause was not specified.
        long limit = showStmt.getLimit();
        long offset = showStmt.getOffset() == -1L ? 0 : showStmt.getOffset();
        if (offset >= rows.size()) {
            rows = Lists.newArrayList();
        } else if (limit != -1L) {
            if ((limit + offset) < rows.size()) {
                rows = rows.subList((int) offset, (int) (limit + offset));
            } else {
                rows = rows.subList((int) offset, rows.size());
            }
        }
        // NOTE(review): a non-zero offset without a limit is silently ignored
        // by the branch above — confirm the parser forbids that combination.
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
private void handleShowRoutineLoadTask() throws AnalysisException {
ShowRoutineLoadTaskStmt showRoutineLoadTaskStmt = (ShowRoutineLoadTaskStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
RoutineLoadJob routineLoadJob;
try {
routineLoadJob =
GlobalStateMgr.getCurrentState().getRoutineLoadManager()
.getJob(showRoutineLoadTaskStmt.getDbFullName(),
showRoutineLoadTaskStmt.getJobName());
} catch (MetaNotFoundException e) {
LOG.warn(e.getMessage(), e);
throw new AnalysisException(e.getMessage());
}
if (routineLoadJob == null) {
throw new AnalysisException("The job named " + showRoutineLoadTaskStmt.getJobName() + "does not exists "
+ "or job state is stopped or cancelled");
}
String dbFullName = showRoutineLoadTaskStmt.getDbFullName();
String tableName;
try {
tableName = routineLoadJob.getTableName();
} catch (MetaNotFoundException e) {
throw new AnalysisException(
"The table metadata of job has been changed. The job will be cancelled automatically", e);
}
if (connectContext.getGlobalStateMgr().isUsingNewPrivilege()) {
if (!PrivilegeManager.checkAnyActionOnTable(connectContext, dbFullName, tableName)) {
resultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);
return;
}
} else {
if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
dbFullName,
tableName,
PrivPredicate.LOAD)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "LOAD",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
tableName);
}
}
rows.addAll(routineLoadJob.getTasksShowInfo());
resultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);
}
    // Handle "SHOW STREAM LOAD": list stream load tasks of a database,
    // optionally by name and including history, with the statement's
    // filter/order/limit expressions applied.
    private void handleShowStreamLoad() throws AnalysisException {
        ShowStreamLoadStmt showStreamLoadStmt = (ShowStreamLoadStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        List<StreamLoadTask> streamLoadTaskList;
        try {
            streamLoadTaskList = GlobalStateMgr.getCurrentState().getStreamLoadManager()
                    .getTask(showStreamLoadStmt.getDbFullName(),
                            showStreamLoadStmt.getName(),
                            showStreamLoadStmt.isIncludeHistory());
        } catch (MetaNotFoundException e) {
            LOG.warn(e.getMessage(), e);
            throw new AnalysisException(e.getMessage());
        }
        if (streamLoadTaskList != null) {
            // Apply WHERE / ORDER BY / LIMIT via the functional expr provider.
            StreamLoadFunctionalExprProvider fProvider = showStreamLoadStmt.getFunctionalExprProvider(this.connectContext);
            rows = streamLoadTaskList.parallelStream()
                    .filter(fProvider.getPredicateChain())
                    .sorted(fProvider.getOrderComparator())
                    .skip(fProvider.getSkipCount())
                    .limit(fProvider.getLimitCount())
                    .map(task -> task.getShowInfo())
                    .collect(Collectors.toList());
        }
        // An explicitly requested label that matched nothing is an error.
        if (!Strings.isNullOrEmpty(showStreamLoadStmt.getName()) && rows.isEmpty()) {
            throw new AnalysisException("There is no label named " + showStreamLoadStmt.getName()
                    + " in db " + showStreamLoadStmt.getDbFullName()
                    + ". Include history? " + showStreamLoadStmt.isIncludeHistory());
        }
        resultSet = new ShowResultSet(showStreamLoadStmt.getMetaData(), rows);
    }
private void handleShowUserProperty() throws AnalysisException {
ShowUserPropertyStmt showStmt = (ShowUserPropertyStmt) stmt;
resultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getRows(connectContext));
}
private void handleShowDelete() throws AnalysisException {
ShowDeleteStmt showStmt = (ShowDeleteStmt) stmt;
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
Database db = globalStateMgr.getDb(showStmt.getDbName());
MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
long dbId = db.getId();
DeleteHandler deleteHandler = globalStateMgr.getDeleteHandler();
Load load = globalStateMgr.getLoadInstance();
List<List<Comparable>> deleteInfos = deleteHandler.getDeleteInfosByDb(dbId);
List<List<String>> rows = Lists.newArrayList();
for (List<Comparable> deleteInfo : deleteInfos) {
List<String> oneInfo = new ArrayList<String>(deleteInfo.size());
for (Comparable element : deleteInfo) {
oneInfo.add(element.toString());
}
rows.add(oneInfo);
}
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
private void handleShowAlter() throws AnalysisException {
ShowAlterStmt showStmt = (ShowAlterStmt) stmt;
ProcNodeInterface procNodeI = showStmt.getNode();
Preconditions.checkNotNull(procNodeI);
List<List<String>> rows;
if (procNodeI instanceof SchemaChangeProcDir) {
rows = ((SchemaChangeProcDir) procNodeI).fetchResultByFilter(showStmt.getFilterMap(),
showStmt.getOrderPairs(), showStmt.getLimitElement()).getRows();
} else {
rows = procNodeI.fetchResult().getRows();
}
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
private void handleShowCollation() {
ShowCollationStmt showStmt = (ShowCollationStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
List<String> row = Lists.newArrayList();
row.add("utf8_general_ci");
row.add("utf8");
row.add("33");
row.add("Yes");
row.add("Yes");
row.add("1");
rows.add(row);
row = Lists.newArrayList();
row.add("binary");
row.add("binary");
row.add("63");
row.add("Yes");
row.add("Yes");
row.add("1");
rows.add(row);
row = Lists.newArrayList();
row.add("gbk_chinese_ci");
row.add("gbk");
row.add("28");
row.add("Yes");
row.add("Yes");
row.add("1");
rows.add(row);
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
    // Handle "SHOW DATA": report data size and replica counts, either for
    // every table of a database (tableName == null) or per materialized index
    // of a single table.
    private void handleShowData() {
        ShowDataStmt showStmt = (ShowDataStmt) stmt;
        String dbName = showStmt.getDbName();
        Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
        if (db == null) {
            ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);
        }
        db.readLock();
        try {
            String tableName = showStmt.getTableName();
            // Rows are accumulated directly into the statement's result buffer.
            List<List<String>> totalRows = showStmt.getResultRows();
            if (tableName == null) {
                // Database-wide summary: one row per visible native table,
                // followed by Total / Quota / Left summary rows.
                long totalSize = 0;
                long totalReplicaCount = 0;
                List<Table> tables = db.getTables();
                // Privilege-filter first, then sort by name for stable output.
                SortedSet<Table> sortedTables = new TreeSet<>(Comparator.comparing(Table::getName));
                for (Table table : tables) {
                    if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
                        if (!PrivilegeManager.checkAnyActionOnTable(connectContext, dbName, table.getName())) {
                            continue;
                        }
                    } else if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
                            dbName, table.getName(), PrivPredicate.SHOW)) {
                        continue;
                    }
                    sortedTables.add(table);
                }
                for (Table table : sortedTables) {
                    // Only native (OLAP) tables have locally stored data.
                    if (!table.isNativeTable()) {
                        continue;
                    }
                    OlapTable olapTable = (OlapTable) table;
                    long tableSize = olapTable.getDataSize();
                    long replicaCount = olapTable.getReplicaCount();
                    Pair<Double, String> tableSizePair = DebugUtil.getByteUint(tableSize);
                    String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(tableSizePair.first) + " "
                            + tableSizePair.second;
                    List<String> row = Arrays.asList(table.getName(), readableSize, String.valueOf(replicaCount));
                    totalRows.add(row);
                    totalSize += tableSize;
                    totalReplicaCount += replicaCount;
                }
                // "Total" row: aggregate over the listed tables.
                Pair<Double, String> totalSizePair = DebugUtil.getByteUint(totalSize);
                String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + " "
                        + totalSizePair.second;
                List<String> total = Arrays.asList("Total", readableSize, String.valueOf(totalReplicaCount));
                totalRows.add(total);
                // "Quota" row: configured data/replica quota of the database.
                long quota = db.getDataQuota();
                long replicaQuota = db.getReplicaQuota();
                Pair<Double, String> quotaPair = DebugUtil.getByteUint(quota);
                String readableQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(quotaPair.first) + " "
                        + quotaPair.second;
                List<String> quotaRow = Arrays.asList("Quota", readableQuota, String.valueOf(replicaQuota));
                totalRows.add(quotaRow);
                // "Left" row: remaining quota, clamped at zero.
                long left = Math.max(0, quota - totalSize);
                long replicaCountLeft = Math.max(0, replicaQuota - totalReplicaCount);
                Pair<Double, String> leftPair = DebugUtil.getByteUint(left);
                String readableLeft = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(leftPair.first) + " "
                        + leftPair.second;
                List<String> leftRow = Arrays.asList("Left", readableLeft, String.valueOf(replicaCountLeft));
                totalRows.add(leftRow);
            } else {
                // Single-table mode requires an explicit privilege on the table.
                if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
                    if (!PrivilegeManager.checkAnyActionOnTable(connectContext, dbName, tableName)) {
                        ErrorReport.reportSemanticException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW DATA",
                                connectContext.getQualifiedUser(),
                                connectContext.getRemoteIP(),
                                tableName);
                    }
                } else if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
                        dbName, tableName, PrivPredicate.SHOW)) {
                    ErrorReport.reportSemanticException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW DATA",
                            connectContext.getQualifiedUser(),
                            connectContext.getRemoteIP(),
                            tableName);
                }
                Table table = db.getTable(tableName);
                if (table == null) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);
                }
                if (!table.isLocalTable()) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, tableName);
                }
                OlapTable olapTable = (OlapTable) table;
                int i = 0;
                long totalSize = 0;
                long totalReplicaCount = 0;
                // One row per materialized index (rollup), sorted by index name.
                Map<String, Long> indexNames = olapTable.getIndexNameToId();
                Map<String, Long> sortedIndexNames = new TreeMap<String, Long>(indexNames);
                for (Long indexId : sortedIndexNames.values()) {
                    long indexSize = 0;
                    long indexReplicaCount = 0;
                    long indexRowCount = 0;
                    // Aggregate size/replicas/rows across all partitions.
                    // NOTE(review): assumes every partition contains this index
                    // id — a null from getIndex() would NPE here.
                    for (Partition partition : olapTable.getAllPartitions()) {
                        MaterializedIndex mIndex = partition.getIndex(indexId);
                        indexSize += mIndex.getDataSize();
                        indexReplicaCount += mIndex.getReplicaCount();
                        indexRowCount += mIndex.getRowCount();
                    }
                    Pair<Double, String> indexSizePair = DebugUtil.getByteUint(indexSize);
                    String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(indexSizePair.first) + " "
                            + indexSizePair.second;
                    List<String> row = null;
                    // Only the first row carries the table name; later rows
                    // leave that column blank.
                    if (i == 0) {
                        row = Arrays.asList(tableName,
                                olapTable.getIndexNameById(indexId),
                                readableSize, String.valueOf(indexReplicaCount),
                                String.valueOf(indexRowCount));
                    } else {
                        row = Arrays.asList("",
                                olapTable.getIndexNameById(indexId),
                                readableSize, String.valueOf(indexReplicaCount),
                                String.valueOf(indexRowCount));
                    }
                    totalSize += indexSize;
                    totalReplicaCount += indexReplicaCount;
                    totalRows.add(row);
                    i++;
                }
                // Trailing "Total" row over all indexes of the table.
                Pair<Double, String> totalSizePair = DebugUtil.getByteUint(totalSize);
                String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + " "
                        + totalSizePair.second;
                List<String> row = Arrays.asList("", "Total", readableSize, String.valueOf(totalReplicaCount), "");
                totalRows.add(row);
            }
        } catch (AnalysisException e) {
            // Convert to the unchecked SemanticException used by the analyzer.
            throw new SemanticException(e.getMessage());
        } finally {
            db.readUnlock();
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getResultRows());
    }
private void handleShowPartitions() throws AnalysisException {
ShowPartitionsStmt showStmt = (ShowPartitionsStmt) stmt;
ProcNodeInterface procNodeI = showStmt.getNode();
Preconditions.checkNotNull(procNodeI);
List<List<String>> rows = ((PartitionsProcDir) procNodeI).fetchResultByFilter(showStmt.getFilterMap(),
showStmt.getOrderByPairs(), showStmt.getLimitElement()).getRows();
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW TABLET in two modes:
//   SHOW TABLET <id>             - resolve one tablet through the inverted index and report its
//                                  db/table/partition/index names plus an isSync flag that says
//                                  whether catalog meta and the inverted index agree.
//   SHOW TABLET FROM <table> ... - list tablets of a table with optional partition/index filters,
//                                  version/backend/state filters, ORDER BY and OFFSET/LIMIT.
private void handleShowTablet() throws AnalysisException {
    ShowTabletStmt showStmt = (ShowTabletStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    if (showStmt.isShowSingleTablet()) {
        long tabletId = showStmt.getTabletId();
        TabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentInvertedIndex();
        TabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);
        // NOT_EXIST_VALUE placeholders keep the row printable even for unknown tablets.
        Long dbId = tabletMeta != null ? tabletMeta.getDbId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String dbName = null;
        Long tableId = tabletMeta != null ? tabletMeta.getTableId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String tableName = null;
        Long partitionId = tabletMeta != null ? tabletMeta.getPartitionId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String partitionName = null;
        Long indexId = tabletMeta != null ? tabletMeta.getIndexId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String indexName = null;
        Boolean isSync = true;
        // do { ... } while (false) is a structured early-exit: any missing catalog level
        // marks the tablet out-of-sync and stops name resolution.
        do {
            Database db = globalStateMgr.getDb(dbId);
            if (db == null) {
                isSync = false;
                break;
            }
            dbName = db.getFullName();
            db.readLock();
            try {
                Table table = db.getTable(tableId);
                if (table == null || !(table instanceof OlapTable)) {
                    isSync = false;
                    break;
                }
                tableName = table.getName();
                OlapTable olapTable = (OlapTable) table;
                Partition partition = olapTable.getPartition(partitionId);
                if (partition == null) {
                    isSync = false;
                    break;
                }
                partitionName = partition.getName();
                MaterializedIndex index = partition.getIndex(indexId);
                if (index == null) {
                    isSync = false;
                    break;
                }
                indexName = olapTable.getIndexNameById(indexId);
                // Lake tables have no local replicas to cross-check against the inverted index.
                if (table.isLakeTable()) {
                    break;
                }
                LocalTablet tablet = (LocalTablet) index.getTablet(tabletId);
                if (tablet == null) {
                    isSync = false;
                    break;
                }
                List<Replica> replicas = tablet.getImmutableReplicas();
                for (Replica replica : replicas) {
                    Replica tmp = invertedIndex.getReplica(tabletId, replica.getBackendId());
                    if (tmp == null) {
                        isSync = false;
                        break;
                    }
                    // Identity comparison on purpose: the inverted index must hold the very
                    // same Replica object as the catalog.
                    if (tmp != replica) {
                        isSync = false;
                        break;
                    }
                }
                // NOTE(review): the breaks in the replica loop only exit that loop, not the
                // do/while — harmless here because nothing follows inside the try block.
            } finally {
                db.readUnlock();
            }
        } while (false);
        String detailCmd = String.format("SHOW PROC '/dbs/%d/%d/partitions/%d/%d/%d';",
                dbId, tableId, partitionId, indexId, tabletId);
        rows.add(Lists.newArrayList(dbName, tableName, partitionName, indexName,
                dbId.toString(), tableId.toString(),
                partitionId.toString(), indexId.toString(),
                isSync.toString(), detailCmd));
    } else {
        Database db = globalStateMgr.getDb(showStmt.getDbName());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
        db.readLock();
        try {
            Table table = db.getTable(showStmt.getTableName());
            if (table == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTableName());
            }
            if (!table.isNativeTable()) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, showStmt.getTableName());
            }
            OlapTable olapTable = (OlapTable) table;
            // sizeLimit = OFFSET + LIMIT: collection stops once that many rows are gathered.
            long sizeLimit = -1;
            if (showStmt.hasOffset() && showStmt.hasLimit()) {
                sizeLimit = showStmt.getOffset() + showStmt.getLimit();
            } else if (showStmt.hasLimit()) {
                sizeLimit = showStmt.getLimit();
            }
            boolean stop = false;
            Collection<Partition> partitions = new ArrayList<Partition>();
            if (showStmt.hasPartition()) {
                PartitionNames partitionNames = showStmt.getPartitionNames();
                for (String partName : partitionNames.getPartitionNames()) {
                    Partition partition = olapTable.getPartition(partName, partitionNames.isTemp());
                    if (partition == null) {
                        throw new AnalysisException("Unknown partition: " + partName);
                    }
                    partitions.add(partition);
                }
            } else {
                partitions = olapTable.getPartitions();
            }
            List<List<Comparable>> tabletInfos = new ArrayList<>();
            String indexName = showStmt.getIndexName();
            long indexId = -1;
            if (indexName != null) {
                Long id = olapTable.getIndexIdByName(indexName);
                if (id == null) {
                    // Unknown rollup/materialized-index name.
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getIndexName());
                }
                indexId = id;
            }
            for (Partition partition : partitions) {
                if (stop) {
                    break;
                }
                for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {
                    if (indexId > -1 && index.getId() != indexId) {
                        continue;
                    }
                    // Lake and local tables expose their tablets through different proc nodes.
                    if (olapTable.isLakeTable()) {
                        LakeTabletsProcNode procNode = new LakeTabletsProcNode(db, (LakeTable) olapTable, index);
                        tabletInfos.addAll(procNode.fetchComparableResult());
                    } else {
                        LocalTabletsProcDir procDir = new LocalTabletsProcDir(db, olapTable, index);
                        tabletInfos.addAll(procDir.fetchComparableResult(
                                showStmt.getVersion(), showStmt.getBackendId(), showStmt.getReplicaState()));
                    }
                    if (sizeLimit > -1 && tabletInfos.size() >= sizeLimit) {
                        stop = true;
                        break;
                    }
                }
            }
            // NOTE(review): fewer rows than OFFSET+LIMIT yields an EMPTY result instead of the
            // remaining rows — surprising pagination behavior; confirm it is intended.
            if (sizeLimit > -1 && tabletInfos.size() < sizeLimit) {
                tabletInfos.clear();
            } else if (sizeLimit > -1) {
                tabletInfos = tabletInfos.subList((int) showStmt.getOffset(), (int) sizeLimit);
            }
            // Sort by the requested ORDER BY columns, otherwise by the first two columns.
            List<OrderByPair> orderByPairs = showStmt.getOrderByPairs();
            ListComparator<List<Comparable>> comparator = null;
            if (orderByPairs != null) {
                OrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];
                comparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));
            } else {
                comparator = new ListComparator<>(0, 1);
            }
            Collections.sort(tabletInfos, comparator);
            for (List<Comparable> tabletInfo : tabletInfos) {
                List<String> oneTablet = new ArrayList<String>(tabletInfo.size());
                for (Comparable column : tabletInfo) {
                    oneTablet.add(column.toString());
                }
                rows.add(oneTablet);
            }
        } finally {
            db.readUnlock();
        }
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW BROKER: the broker manager renders the rows directly.
private void handleShowBroker() {
    ShowBrokerStmt brokerStmt = (ShowBrokerStmt) stmt;
    List<List<String>> brokerRows = GlobalStateMgr.getCurrentState().getBrokerMgr().getBrokersInfo();
    resultSet = new ShowResultSet(brokerStmt.getMetaData(), brokerRows);
}
// Handles SHOW RESOURCES: the resource manager renders the rows directly.
private void handleShowResources() {
    ShowResourcesStmt resourcesStmt = (ShowResourcesStmt) stmt;
    resultSet = new ShowResultSet(resourcesStmt.getMetaData(),
            GlobalStateMgr.getCurrentState().getResourceMgr().getResourcesInfo());
}
// Handles SHOW EXPORT: lists export jobs of a database, optionally filtered by
// job id, job state, query id, with ORDER BY and LIMIT applied by the ExportMgr.
private void handleShowExport() throws AnalysisException {
    ShowExportStmt showExportStmt = (ShowExportStmt) stmt;
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    Database db = globalStateMgr.getDb(showExportStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showExportStmt.getDbName());

    // A single requested state becomes a one-element filter set; null means "any state".
    ExportJob.JobState state = showExportStmt.getJobState();
    Set<ExportJob.JobState> states = (state == null) ? null : Sets.newHashSet(state);

    ExportMgr exportMgr = globalStateMgr.getExportMgr();
    List<List<String>> infos = exportMgr.getExportJobInfosByIdOrState(
            db.getId(), showExportStmt.getJobId(), states, showExportStmt.getQueryId(),
            showExportStmt.getOrderByPairs(), showExportStmt.getLimit());
    resultSet = new ShowResultSet(showExportStmt.getMetaData(), infos);
}
// Handles SHOW BACKENDS via the backends proc directory.
private void handleShowBackends() {
    final ShowBackendsStmt backendsStmt = (ShowBackendsStmt) stmt;
    resultSet = new ShowResultSet(backendsStmt.getMetaData(), BackendsProcDir.getClusterBackendInfos());
}
// Handles SHOW FRONTENDS: the proc node fills the row list in place.
private void handleShowFrontends() {
    final ShowFrontendsStmt frontendsStmt = (ShowFrontendsStmt) stmt;
    List<List<String>> frontendRows = Lists.newArrayList();
    FrontendsProcNode.getFrontendsInfo(GlobalStateMgr.getCurrentState(), frontendRows);
    resultSet = new ShowResultSet(frontendsStmt.getMetaData(), frontendRows);
}
// Handles SHOW REPOSITORIES: the repository manager renders the rows directly.
private void handleShowRepositories() {
    final ShowRepositoriesStmt repositoriesStmt = (ShowRepositoriesStmt) stmt;
    List<List<String>> repoRows =
            GlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getReposInfo();
    resultSet = new ShowResultSet(repositoriesStmt.getMetaData(), repoRows);
}
// Handles SHOW SNAPSHOT ON <repo>: lists snapshots of a repository, optionally
// filtered by snapshot name and timestamp.
private void handleShowSnapshot() throws AnalysisException {
    final ShowSnapshotStmt snapshotStmt = (ShowSnapshotStmt) stmt;
    String repoName = snapshotStmt.getRepoName();
    Repository repo = GlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getRepo(repoName);
    if (repo == null) {
        throw new AnalysisException("Repository " + repoName + " does not exist");
    }
    resultSet = new ShowResultSet(snapshotStmt.getMetaData(),
            repo.getSnapshotInfos(snapshotStmt.getSnapshotName(), snapshotStmt.getTimestamp()));
}
// Handles SHOW BACKUP: at most one backup/restore job exists per database; an absent
// or non-backup job yields an empty result rather than an error.
private void handleShowBackup() throws AnalysisException {
    ShowBackupStmt showStmt = (ShowBackupStmt) stmt;
    Database db = GlobalStateMgr.getCurrentState().getDb(showStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
    AbstractJob jobI = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(db.getId());
    if (!(jobI instanceof BackupJob)) {
        resultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);
        return;
    }
    BackupJob backupJob = (BackupJob) jobI;
    if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
        // Under the new framework the whole row is hidden unless the caller holds the
        // checked action on EVERY table referenced by the backup job.
        // NOTE(review): the action checked is EXPORT for SHOW BACKUP — confirm intended.
        List<TableRef> tableRefs = backupJob.getTableRef();
        AtomicBoolean privilegeDeny = new AtomicBoolean(false);
        tableRefs.forEach(tableRef -> {
            TableName tableName = tableRef.getName();
            if (!PrivilegeManager.checkTableAction(connectContext, tableName.getDb(), tableName.getTbl(),
                    PrivilegeType.EXPORT)) {
                privilegeDeny.set(true);
            }
        });
        if (privilegeDeny.get()) {
            resultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);
            return;
        }
    }
    List<String> info = backupJob.getInfo();
    List<List<String>> infos = Lists.newArrayList();
    infos.add(info);
    resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
}
// Handles SHOW RESTORE: a missing or non-restore job yields an empty result.
private void handleShowRestore() throws AnalysisException {
    ShowRestoreStmt restoreStmt = (ShowRestoreStmt) stmt;
    Database db = GlobalStateMgr.getCurrentState().getDb(restoreStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, restoreStmt.getDbName());
    AbstractJob job = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(db.getId());
    if (!(job instanceof RestoreJob)) {
        resultSet = new ShowResultSet(restoreStmt.getMetaData(), EMPTY_SET);
        return;
    }
    List<List<String>> infos = Lists.newArrayList();
    infos.add(((RestoreJob) job).getInfo());
    resultSet = new ShowResultSet(restoreStmt.getMetaData(), infos);
}
/**
 * Handles SHOW GRANTS [FOR ROLE <role> | FOR <user>].
 *
 * Under the new privilege framework each result row is (grantee, catalog, grant SQL):
 * first role-inheritance rows, then one row per privilege entry. Under the legacy
 * framework the Auth module renders the rows directly.
 *
 * Refactor: the catalog-column resolution and the GRANT-statement rendering were
 * duplicated verbatim between the role branch and the user branch; both are now
 * shared helpers (behavior unchanged).
 */
private void handleShowGrants() {
    ShowGrantsStmt showStmt = (ShowGrantsStmt) stmt;
    if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
        PrivilegeManager privilegeManager = GlobalStateMgr.getCurrentState().getPrivilegeManager();
        try {
            if (showStmt.getRole() != null) {
                List<List<String>> infos = new ArrayList<>();
                Long roleId = privilegeManager.getRoleIdByNameAllowNull(showStmt.getRole());
                if (roleId == null) {
                    throw new SemanticException("There is no such grant defined for role " + showStmt.getRole());
                }

                RolePrivilegeCollection rolePrivilegeCollection =
                        privilegeManager.getRolePrivilegeCollectionUnlocked(roleId, true);

                List<String> parentRoleNameList = new ArrayList<>();
                for (Long parentRoleId : rolePrivilegeCollection.getParentRoleIds()) {
                    RolePrivilegeCollection parentRolePriv =
                            privilegeManager.getRolePrivilegeCollectionUnlocked(parentRoleId, true);
                    parentRoleNameList.add(parentRolePriv.getName());

                    // NOTE(review): one row is emitted per parent role with the *accumulated*
                    // name list, so earlier parents repeat in later rows — confirm intended.
                    List<String> info = Lists.newArrayList(showStmt.getRole(), null,
                            AstToSQLBuilder.toSQL(new GrantRoleStmt(parentRoleNameList, showStmt.getRole())));
                    infos.add(info);
                }

                Map<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntryList =
                        rolePrivilegeCollection.getTypeToPrivilegeEntryList();
                for (Map.Entry<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntry
                        : typeToPrivilegeEntryList.entrySet()) {
                    for (PrivilegeCollection.PrivilegeEntry privilegeEntry : typeToPrivilegeEntry.getValue()) {
                        ObjectType objectType = typeToPrivilegeEntry.getKey();
                        String roleName = showStmt.getRole();
                        List<String> info = new ArrayList<>();
                        info.add(roleName);
                        info.add(resolveCatalogColumn(objectType, privilegeEntry));
                        info.add(buildGrantSql(privilegeManager, objectType, privilegeEntry,
                                new GrantPrivilegeStmt(new ArrayList<>(), objectType.name(), roleName)));
                        infos.add(info);
                    }
                }
                resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
            } else {
                List<List<String>> infos = new ArrayList<>();
                UserIdentity userIdentity = showStmt.getUserIdent();
                UserPrivilegeCollection userPrivilegeCollection =
                        privilegeManager.getUserPrivilegeCollectionUnlocked(userIdentity);

                Set<Long> allRoles = userPrivilegeCollection.getAllRoles();
                if (!allRoles.isEmpty()) {
                    infos.add(Lists.newArrayList(userIdentity.toString(), null, AstToSQLBuilder.toSQL(
                            new GrantRoleStmt(allRoles.stream().map(roleId -> {
                                try {
                                    return privilegeManager
                                            .getRolePrivilegeCollectionUnlocked(roleId, true).getName();
                                } catch (PrivilegeException e) {
                                    throw new RuntimeException(e);
                                }
                            }).collect(Collectors.toList()), userIdentity))));
                }

                Map<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntryList =
                        userPrivilegeCollection.getTypeToPrivilegeEntryList();
                for (Map.Entry<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntry
                        : typeToPrivilegeEntryList.entrySet()) {
                    for (PrivilegeCollection.PrivilegeEntry privilegeEntry : typeToPrivilegeEntry.getValue()) {
                        ObjectType objectType = typeToPrivilegeEntry.getKey();
                        List<String> info = new ArrayList<>();
                        info.add(userIdentity.toString());
                        info.add(resolveCatalogColumn(objectType, privilegeEntry));
                        info.add(buildGrantSql(privilegeManager, objectType, privilegeEntry,
                                new GrantPrivilegeStmt(new ArrayList<>(), objectType.name(), userIdentity)));
                        infos.add(info);
                    }
                }
                resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
            }
        } catch (PrivilegeException e) {
            throw new SemanticException(e.getMessage());
        }
    } else {
        List<List<String>> infos = GlobalStateMgr.getCurrentState().getAuth().getGrantsSQLs(showStmt.getUserIdent());
        resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
    }
}

// Resolves the "catalog" result column for one privilege entry: the catalog name for
// catalog-scoped entries (null meaning "all catalogs"), otherwise the literal "default".
private static String resolveCatalogColumn(ObjectType objectType,
                                           PrivilegeCollection.PrivilegeEntry privilegeEntry) {
    if (!objectType.equals(ObjectType.CATALOG)) {
        return "default";
    }
    CatalogPEntryObject catalogPEntryObject = (CatalogPEntryObject) privilegeEntry.getObject();
    if (catalogPEntryObject.getId() == CatalogPEntryObject.ALL_CATALOG_ID) {
        return null;
    }
    List<Catalog> catalogs = new ArrayList<>(
            GlobalStateMgr.getCurrentState().getCatalogMgr().getCatalogs().values());
    Optional<Catalog> catalogOptional = catalogs.stream().filter(
            catalog -> catalog.getId() == catalogPEntryObject.getId()
    ).findFirst();
    if (!catalogOptional.isPresent()) {
        throw new SemanticException("can't find catalog");
    }
    return catalogOptional.get().getName();
}

// Renders the GRANT statement text for one privilege entry onto the given
// (already grantee-bound) GrantPrivilegeStmt.
private static String buildGrantSql(PrivilegeManager privilegeManager, ObjectType objectType,
                                    PrivilegeCollection.PrivilegeEntry privilegeEntry,
                                    GrantPrivilegeStmt grantPrivilegeStmt) throws PrivilegeException {
    grantPrivilegeStmt.setObjectType(objectType);
    ActionSet actionSet = privilegeEntry.getActionSet();
    List<PrivilegeType> privList = privilegeManager.analyzeActionSet(objectType, actionSet);
    grantPrivilegeStmt.setPrivilegeTypes(privList);
    grantPrivilegeStmt.setObjectList(Lists.newArrayList(privilegeEntry.getObject()));
    return AstToStringBuilder.toString(grantPrivilegeStmt);
}
// Handles SHOW ROLES: one row per role name; legacy auth renders its own rows.
private void handleShowRoles() throws AnalysisException {
    ShowRolesStmt showRolesStmt = (ShowRolesStmt) stmt;
    if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
        try {
            PrivilegeManager privilegeManager = GlobalStateMgr.getCurrentState().getPrivilegeManager();
            List<List<String>> rows = new ArrayList<>();
            for (Long roleId : privilegeManager.getRoleIdToPrivilegeCollection().keySet()) {
                RolePrivilegeCollection collection =
                        privilegeManager.getRolePrivilegeCollectionUnlocked(roleId, true);
                List<String> row = new ArrayList<>();
                row.add(collection.getName());
                rows.add(row);
            }
            resultSet = new ShowResultSet(showRolesStmt.getMetaData(), rows);
        } catch (PrivilegeException e) {
            throw new AnalysisException(e.getMessage());
        }
    } else {
        resultSet = new ShowResultSet(showRolesStmt.getMetaData(),
                GlobalStateMgr.getCurrentState().getAuth().getRoleInfo());
    }
}
// Handles SHOW [ALL] USERS: all known user identities, or just the current one.
private void handleShowUser() {
    ShowUserStmt showUserStmt = (ShowUserStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    if (showUserStmt.isAll()) {
        Set<UserIdentity> userIdentities =
                GlobalStateMgr.getCurrentState().getPrivilegeManager().getUserToPrivilegeCollection().keySet();
        for (UserIdentity userIdentity : userIdentities) {
            rows.add(Lists.newArrayList(userIdentity.toString()));
        }
    } else {
        rows.add(Lists.newArrayList(connectContext.getCurrentUserIdentity().toString()));
    }
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// Handles ADMIN SHOW REPLICA STATUS, translating DdlException into AnalysisException.
private void handleAdminShowTabletStatus() throws AnalysisException {
    AdminShowReplicaStatusStmt statusStmt = (AdminShowReplicaStatusStmt) stmt;
    try {
        resultSet = new ShowResultSet(statusStmt.getMetaData(), MetadataViewer.getTabletStatus(statusStmt));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
// Handles ADMIN SHOW REPLICA DISTRIBUTION, translating DdlException into AnalysisException.
private void handleAdminShowTabletDistribution() throws AnalysisException {
    AdminShowReplicaDistributionStmt distributionStmt = (AdminShowReplicaDistributionStmt) stmt;
    try {
        resultSet = new ShowResultSet(distributionStmt.getMetaData(),
                MetadataViewer.getTabletDistribution(distributionStmt));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
// Handles ADMIN SHOW FRONTEND CONFIG, optionally filtered by a MySQL-style pattern,
// sorted by config name.
private void handleAdminShowConfig() throws AnalysisException {
    AdminShowConfigStmt configStmt = (AdminShowConfigStmt) stmt;
    try {
        PatternMatcher matcher = null;
        if (configStmt.getPattern() != null) {
            matcher = PatternMatcher.createMysqlPattern(configStmt.getPattern(),
                    CaseSensibility.CONFIG.getCaseSensibility());
        }
        List<List<String>> results = ConfigBase.getConfigInfo(matcher);
        results.sort(Comparator.comparing((List<String> row) -> row.get(0)));
        resultSet = new ShowResultSet(configStmt.getMetaData(), results);
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
// Handles SHOW FILE, translating DdlException into AnalysisException.
private void handleShowSmallFiles() throws AnalysisException {
    ShowSmallFilesStmt smallFilesStmt = (ShowSmallFilesStmt) stmt;
    try {
        resultSet = new ShowResultSet(smallFilesStmt.getMetaData(),
                GlobalStateMgr.getCurrentState().getSmallFileMgr().getInfo(smallFilesStmt.getDbName()));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
/**
 * Handles SHOW DYNAMIC PARTITION TABLES: one row per visible OLAP table of the database
 * that has dynamic partitioning configured, showing its properties plus the scheduler's
 * runtime info. Tables without the feature also get their stale runtime info purged.
 */
private void handleShowDynamicPartition() {
    ShowDynamicPartitionStmt showDynamicPartitionStmt = (ShowDynamicPartitionStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    Database db = connectContext.getGlobalStateMgr().getDb(showDynamicPartitionStmt.getDb());
    if (db != null) {
        db.readLock();
        try {
            for (Table tbl : db.getTables()) {
                if (!(tbl instanceof OlapTable)) {
                    continue;
                }
                DynamicPartitionScheduler dynamicPartitionScheduler =
                        GlobalStateMgr.getCurrentState().getDynamicPartitionScheduler();
                OlapTable olapTable = (OlapTable) tbl;
                if (!olapTable.dynamicPartitionExists()) {
                    dynamicPartitionScheduler.removeRuntimeInfo(olapTable.getName());
                    continue;
                }
                // Hide tables the current user may not see (new or legacy privilege framework).
                if (GlobalStateMgr.getCurrentState().isUsingNewPrivilege()) {
                    if (!PrivilegeManager.checkAnyActionOnTable(ConnectContext.get(),
                            db.getFullName(), olapTable.getName())) {
                        continue;
                    }
                } else {
                    if (!GlobalStateMgr.getCurrentState().getAuth().checkTblPriv(ConnectContext.get(),
                            db.getFullName(), olapTable.getName(),
                            PrivPredicate.SHOW)) {
                        continue;
                    }
                }
                DynamicPartitionProperty dynamicPartitionProperty =
                        olapTable.getTableProperty().getDynamicPartitionProperty();
                String tableName = olapTable.getName();
                int replicationNum = dynamicPartitionProperty.getReplicationNum();
                // BUG FIX: the original ternary displayed FeConstants.default_replication_num
                // whenever the property HAD an explicit replication number, discarding the
                // configured value. Only fall back to the table default when unset.
                if (replicationNum == DynamicPartitionProperty.NOT_SET_REPLICATION_NUM) {
                    replicationNum = olapTable.getDefaultReplicationNum();
                }
                rows.add(Lists.newArrayList(
                        tableName,
                        String.valueOf(dynamicPartitionProperty.getEnable()),
                        dynamicPartitionProperty.getTimeUnit().toUpperCase(),
                        String.valueOf(dynamicPartitionProperty.getStart()),
                        String.valueOf(dynamicPartitionProperty.getEnd()),
                        dynamicPartitionProperty.getPrefix(),
                        String.valueOf(dynamicPartitionProperty.getBuckets()),
                        String.valueOf(replicationNum),
                        dynamicPartitionProperty.getStartOfInfo(),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_UPDATE_TIME),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_SCHEDULER_TIME),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.DYNAMIC_PARTITION_STATE),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.CREATE_PARTITION_MSG),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.DROP_PARTITION_MSG)));
            }
        } finally {
            db.readUnlock();
        }
        resultSet = new ShowResultSet(showDynamicPartitionStmt.getMetaData(), rows);
    }
}
// Handles SHOW TRANSACTION: renders the single transaction identified by txn id.
private void handleShowTransaction() throws AnalysisException {
    ShowTransactionStmt transactionStmt = (ShowTransactionStmt) stmt;
    Database db = connectContext.getGlobalStateMgr().getDb(transactionStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, transactionStmt.getDbName());
    GlobalTransactionMgr transactionMgr = GlobalStateMgr.getCurrentGlobalTransactionMgr();
    List<List<String>> infos = transactionMgr.getSingleTranInfo(db.getId(), transactionStmt.getTxnId());
    resultSet = new ShowResultSet(transactionStmt.getMetaData(), infos);
}
// Handles SHOW PLUGINS: the plugin manager renders the rows directly.
private void handleShowPlugins() throws AnalysisException {
    ShowPluginsStmt pluginsStmt = (ShowPluginsStmt) stmt;
    resultSet = new ShowResultSet(pluginsStmt.getMetaData(),
            GlobalStateMgr.getCurrentPluginMgr().getPluginShowInfos());
}
// Handles SHOW SQLBLACKLIST: one (id, sql) row per blacklist entry.
private void handleShowSqlBlackListStmt() throws AnalysisException {
    ShowSqlBlackListStmt blackListStmt = (ShowSqlBlackListStmt) stmt;
    List<List<String>> rows = new ArrayList<List<String>>();
    SqlBlackList.getInstance().sqlBlackListMap.forEach((sql, blackListSql) -> {
        List<String> row = new ArrayList<String>();
        row.add(String.valueOf(blackListSql.id));
        row.add(sql);
        rows.add(row);
    });
    resultSet = new ShowResultSet(blackListStmt.getMetaData(), rows);
}
// Handles SHOW ANALYZE JOB: jobs sorted by id, filtered by the statement predicate.
private void handleShowAnalyzeJob() {
    List<AnalyzeJob> jobs = connectContext.getGlobalStateMgr().getAnalyzeManager().getAllAnalyzeJobList();
    jobs.sort(Comparator.comparing(AnalyzeJob::getId));
    List<List<String>> rows = Lists.newArrayList();
    for (AnalyzeJob job : jobs) {
        try {
            List<String> row = ShowAnalyzeJobStmt.showAnalyzeJobs(connectContext, job);
            if (row != null) {
                rows.add(row);
            }
        } catch (MetaNotFoundException ignored) {
            // Best-effort listing: skip jobs whose referenced meta objects were dropped.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// Handles SHOW ANALYZE STATUS: statuses sorted by id, filtered by the statement predicate.
private void handleShowAnalyzeStatus() {
    List<AnalyzeStatus> statuses = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeManager()
            .getAnalyzeStatusMap().values());
    statuses.sort(Comparator.comparing(AnalyzeStatus::getId));
    List<List<String>> rows = Lists.newArrayList();
    for (AnalyzeStatus status : statuses) {
        try {
            List<String> row = ShowAnalyzeStatusStmt.showAnalyzeStatus(connectContext, status);
            if (row != null) {
                rows.add(row);
            }
        } catch (MetaNotFoundException ignored) {
            // Best-effort listing: skip statuses whose referenced meta objects were dropped.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// Handles SHOW STATS META (basic statistics), filtered by the statement predicate.
private void handleShowBasicStatsMeta() {
    List<BasicStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeManager()
            .getBasicStatsMetaMap().values());
    List<List<String>> rows = Lists.newArrayList();
    for (BasicStatsMeta meta : metas) {
        try {
            List<String> row = ShowBasicStatsMetaStmt.showBasicStatsMeta(connectContext, meta);
            if (row != null) {
                rows.add(row);
            }
        } catch (MetaNotFoundException ignored) {
            // Best-effort listing: skip metas whose referenced objects were dropped.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// Handles SHOW HISTOGRAM META, filtered by the statement predicate.
private void handleShowHistogramStatsMeta() {
    List<HistogramStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeManager()
            .getHistogramStatsMetaMap().values());
    List<List<String>> rows = Lists.newArrayList();
    for (HistogramStatsMeta meta : metas) {
        try {
            List<String> row = ShowHistogramStatsMetaStmt.showHistogramStatsMeta(connectContext, meta);
            if (row != null) {
                rows.add(row);
            }
        } catch (MetaNotFoundException ignored) {
            // Best-effort listing: skip metas whose referenced objects were dropped.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// Handles SHOW RESOURCE GROUP(S): the resource group manager renders the rows directly.
private void handleShowResourceGroup() throws AnalysisException {
    ShowResourceGroupStmt resourceGroupStmt = (ShowResourceGroupStmt) stmt;
    resultSet = new ShowResultSet(resourceGroupStmt.getMetaData(),
            GlobalStateMgr.getCurrentState().getResourceGroupMgr().showResourceGroup(resourceGroupStmt));
}
// Handles SHOW CATALOGS: hides catalogs the user holds no action on (new privilege
// framework only), sorted by catalog name.
private void handleShowCatalogs() {
    ShowCatalogsStmt showCatalogsStmt = (ShowCatalogsStmt) stmt;
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    CatalogMgr catalogMgr = globalStateMgr.getCatalogMgr();
    List<List<String>> rowSet = catalogMgr.getCatalogsInfo().stream()
            .filter(row -> !globalStateMgr.isUsingNewPrivilege()
                    || PrivilegeManager.checkAnyActionOnCatalog(connectContext, row.get(0)))
            .sorted(Comparator.comparing(row -> row.get(0)))
            .collect(Collectors.toList());
    resultSet = new ShowResultSet(showCatalogsStmt.getMetaData(), rowSet);
}
// Handles SHOW WAREHOUSES, sorted by warehouse name.
private void handleShowWarehouses() {
    ShowWarehousesStmt warehousesStmt = (ShowWarehousesStmt) stmt;
    WarehouseManager warehouseMgr = GlobalStateMgr.getCurrentState().getWarehouseMgr();
    List<List<String>> rows = warehouseMgr.getWarehousesInfo().stream()
            .sorted(Comparator.comparing(row -> row.get(0)))
            .collect(Collectors.toList());
    resultSet = new ShowResultSet(warehousesStmt.getMetaData(), rows);
}
// Handles SHOW CLUSTERS FROM <warehouse>, sorted by cluster name.
private void handleShowClusters() {
    ShowClustersStmt clustersStmt = (ShowClustersStmt) stmt;
    Warehouse warehouse = GlobalStateMgr.getCurrentState().getWarehouseMgr()
            .getWarehouse(clustersStmt.getWarehouseName());
    List<List<String>> rows = warehouse.getClustersInfo().stream()
            .sorted(Comparator.comparing(row -> row.get(0)))
            .collect(Collectors.toList());
    resultSet = new ShowResultSet(clustersStmt.getMetaData(), rows);
}
// Applies the statement's simple equality predicate (column = 'literal') to the
// already-rendered rows; non-equivalence operators yield an empty result, a missing
// predicate returns the rows unchanged.
private List<List<String>> doPredicate(ShowStmt showStmt,
                                       ShowResultSetMetaData showResultSetMetaData,
                                       List<List<String>> rows) {
    Predicate predicate = showStmt.getPredicate();
    if (predicate == null) {
        return rows;
    }

    BinaryPredicate binaryPredicate = (BinaryPredicate) predicate;
    SlotRef slotRef = (SlotRef) predicate.getChild(0);
    StringLiteral stringLiteral = (StringLiteral) predicate.getChild(1);
    int columnIndex = showResultSetMetaData.getColumnIdx(slotRef.getColumnName());

    List<List<String>> matched = new ArrayList<>();
    if (binaryPredicate.getOp().isEquivalence()) {
        String target = stringLiteral.getStringValue();
        for (List<String> row : rows) {
            if (row.get(columnIndex).equals(target)) {
                matched.add(row);
            }
        }
    }
    return matched;
}
// Handles SHOW CREATE EXTERNAL CATALOG: rebuilds a CREATE EXTERNAL CATALOG statement
// string from the catalog's comment and config properties.
private void handleShowCreateExternalCatalog() {
    ShowCreateExternalCatalogStmt showStmt = (ShowCreateExternalCatalogStmt) stmt;
    String catalogName = showStmt.getCatalogName();
    Catalog catalog = connectContext.getGlobalStateMgr().getCatalogMgr().getCatalogByName(catalogName);
    List<List<String>> rows = Lists.newArrayList();
    // The builtin internal catalog has no CREATE statement; return an empty result.
    if (InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equalsIgnoreCase(catalogName)) {
        resultSet = new ShowResultSet(stmt.getMetaData(), rows);
        return;
    }
    // NOTE(review): catalog is not null-checked — an unknown catalog name would NPE at
    // getComment() below; verify the analyzer guarantees existence before execution.
    StringBuilder createCatalogSql = new StringBuilder();
    createCatalogSql.append("CREATE EXTERNAL CATALOG ")
            .append("`").append(catalogName).append("`")
            .append("\n");
    String comment = catalog.getComment();
    if (comment != null) {
        createCatalogSql.append("comment \"").append(catalog.getComment()).append("\"\n");
    }
    // PrintableMap renders properties as key = value pairs (sensitive values masked).
    createCatalogSql.append("PROPERTIES (")
            .append(new PrintableMap<>(catalog.getConfig(), " = ", true, true))
            .append("\n)");
    rows.add(Lists.newArrayList(catalogName, createCatalogSql.toString()));
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
} |
dockerImage is the full string e.g. "docker-registry.ops.yahoo.com:4443/vespa/ci:6.96.12", so you need to parse it | private static ClusterMembership readMembership(Inspector object) {
return ClusterMembership.from(object.field(hostSpecMembership).asString(),
object.field(hostSpecVespaVersion).valid() ?
com.yahoo.component.Version.fromString(object.field(hostSpecVespaVersion).asString()) :
( object.field(dockerImage).valid() ? com.yahoo.component.Version.fromString(object.field(dockerImage).asString()) : Vtag.currentVersion));
} | } | private static ClusterMembership readMembership(Inspector object) {
return ClusterMembership.from(object.field(hostSpecMembership).asString(),
object.field(hostSpecVespaVersion).valid() ?
com.yahoo.component.Version.fromString(object.field(hostSpecVespaVersion).asString()) :
( object.field(dockerImage).valid() ? new DockerImage(object.field(dockerImage).asString()).tagAsVersion() : Vtag.currentVersion));
} | class ProvisionInfo {
// Slime field names of the JSON wire format used by toSlime/fromSlime.
private static final String mappingKey = "mapping";
private static final String hostSpecKey = "hostSpec";
private static final String hostSpecHostName = "hostName";
private static final String hostSpecMembership = "membership";
private static final String hostSpecFlavor = "flavor";
private static final String hostSpecVespaVersion = "vespaVersion";
// NOTE(review): never written by this fragment; read by readMembership elsewhere — confirm.
private static final String dockerImage = "dockerImage";
// Insertion-ordered set of provisioned hosts; copied on construction, exposed read-only.
private final Set<HostSpec> hosts = new LinkedHashSet<>();
// Private: instances are created via withHosts(); copies the given hosts into
// the insertion-ordered member set.
private ProvisionInfo(Set<HostSpec> hosts) {
    for (HostSpec host : hosts) {
        this.hosts.add(host);
    }
}
// Static factory over the private constructor (which defensively copies the set).
public static ProvisionInfo withHosts(Set<HostSpec> hosts) {
    ProvisionInfo info = new ProvisionInfo(hosts);
    return info;
}
// Serializes every host as one object in a Slime array under mappingKey.
private void toSlime(Cursor cursor) {
    Cursor mappingArray = cursor.setArray(mappingKey);
    for (HostSpec host : hosts) {
        serializeHostSpec(mappingArray.addObject().setObject(hostSpecKey), host);
    }
}
// Writes one host spec: hostname always; membership implies a vespaVersion field;
// flavor is written only when present.
private void serializeHostSpec(Cursor cursor, HostSpec host) {
    cursor.setString(hostSpecHostName, host.hostname());
    host.membership().ifPresent(membership -> {
        cursor.setString(hostSpecMembership, membership.stringValue());
        cursor.setString(hostSpecVespaVersion, membership.cluster().vespaVersion().toString());
    });
    host.flavor().ifPresent(flavor -> cursor.setString(hostSpecFlavor, flavor.name()));
}
// Read-only view of the host set; callers must not mutate our state.
public Set<HostSpec> getHosts() {
    return Collections.unmodifiableSet(this.hosts);
}
// Deserializes the hosts array written by toSlime; order is preserved.
private static ProvisionInfo fromSlime(Inspector inspector, Optional<NodeFlavors> nodeFlavors) {
    Set<HostSpec> hostSpecs = new LinkedHashSet<>();
    inspector.field(mappingKey).traverse(new ArrayTraverser() {
        @Override
        public void entry(int index, Inspector entryInspector) {
            hostSpecs.add(deserializeHostSpec(entryInspector.field(hostSpecKey), nodeFlavors));
        }
    });
    return new ProvisionInfo(hostSpecs);
}
// Reads one host spec object; membership and flavor fields are optional.
private static HostSpec deserializeHostSpec(Inspector object, Optional<NodeFlavors> nodeFlavors) {
    Optional<ClusterMembership> membership = Optional.empty();
    if (object.field(hostSpecMembership).valid()) {
        membership = Optional.of(readMembership(object));
    }
    Optional<Flavor> flavor = Optional.empty();
    if (object.field(hostSpecFlavor).valid()) {
        flavor = readFlavor(object, nodeFlavors);
    }
    return new HostSpec(object.field(hostSpecHostName).asString(), Collections.emptyList(), flavor, membership);
}
// Looks the serialized flavor name up in the flavor registry, when one is available.
private static Optional<Flavor> readFlavor(Inspector object, Optional<NodeFlavors> nodeFlavors) {
    if (!nodeFlavors.isPresent()) {
        return Optional.empty();
    }
    return nodeFlavors.get().getFlavor(object.field(hostSpecFlavor).asString());
}
// Serializes this ProvisionInfo to JSON bytes via its Slime representation.
public byte[] toJson() throws IOException {
Slime slime = new Slime();
toSlime(slime.setObject());
return SlimeUtils.toJsonBytes(slime);
}
// Deserializes a ProvisionInfo previously produced by toJson().
public static ProvisionInfo fromJson(byte[] json, Optional<NodeFlavors> nodeFlavors) {
return fromSlime(SlimeUtils.jsonToSlime(json).get(), nodeFlavors);
}
// Returns a new ProvisionInfo holding the union of this instance's hosts and the
// argument's hosts. Neither operand is modified; insertion order is this instance's
// hosts first, then any additional hosts from the argument.
public ProvisionInfo merge(ProvisionInfo provisionInfo) {
    Set<HostSpec> union = new LinkedHashSet<>(this.hosts);
    union.addAll(provisionInfo.getHosts());
    return withHosts(union);
}
} | class ProvisionInfo {
// NOTE(review): this member set duplicates the ProvisionInfo definition above — this looks
// like a copy/merge artifact; confirm and deduplicate so only one definition remains.
// Slime field-name constants used by the (de)serialization methods below.
private static final String mappingKey = "mapping";
private static final String hostSpecKey = "hostSpec";
private static final String hostSpecHostName = "hostName";
private static final String hostSpecMembership = "membership";
private static final String hostSpecFlavor = "flavor";
private static final String hostSpecVespaVersion = "vespaVersion";
// NOTE(review): dockerImage is not referenced anywhere in the visible code — confirm whether
// it is used elsewhere or can be removed.
private static final String dockerImage = "dockerImage";
// Insertion-ordered set of provisioned hosts.
private final Set<HostSpec> hosts = new LinkedHashSet<>();
// Private: use the withHosts(...) factory. Copies the given set.
private ProvisionInfo(Set<HostSpec> hosts) {
this.hosts.addAll(hosts);
}
// Factory method creating a ProvisionInfo backed by a defensive copy of the given hosts.
public static ProvisionInfo withHosts(Set<HostSpec> hosts) {
return new ProvisionInfo(hosts);
}
// Serializes all hosts as an array of { hostSpec: {...} } objects under the "mapping" key.
private void toSlime(Cursor cursor) {
Cursor array = cursor.setArray(mappingKey);
for (HostSpec host : hosts) {
Cursor object = array.addObject();
serializeHostSpec(object.setObject(hostSpecKey), host);
}
}
// Writes one host: hostname always; membership-related fields and flavor only when present.
private void serializeHostSpec(Cursor cursor, HostSpec host) {
cursor.setString(hostSpecHostName, host.hostname());
if (host.membership().isPresent()) {
cursor.setString(hostSpecMembership, host.membership().get().stringValue());
cursor.setString(hostSpecVespaVersion, host.membership().get().cluster().vespaVersion().toString());
}
if (host.flavor().isPresent())
cursor.setString(hostSpecFlavor, host.flavor().get().name());
}
// Returns a read-only view of the hosts.
public Set<HostSpec> getHosts() {
return Collections.unmodifiableSet(hosts);
}
// Rebuilds a ProvisionInfo from its Slime form (inverse of toSlime).
private static ProvisionInfo fromSlime(Inspector inspector, Optional<NodeFlavors> nodeFlavors) {
Inspector array = inspector.field(mappingKey);
Set<HostSpec> hosts = new LinkedHashSet<>();
array.traverse(new ArrayTraverser() {
@Override
public void entry(int i, Inspector inspector) {
hosts.add(deserializeHostSpec(inspector.field(hostSpecKey), nodeFlavors));
}
});
return new ProvisionInfo(hosts);
}
// Reads a single host spec; membership and flavor are optional (detected via valid()).
private static HostSpec deserializeHostSpec(Inspector object, Optional<NodeFlavors> nodeFlavors) {
Optional<ClusterMembership> membership =
object.field(hostSpecMembership).valid() ? Optional.of(readMembership(object)) : Optional.empty();
Optional<Flavor> flavor =
object.field(hostSpecFlavor).valid() ? readFlavor(object, nodeFlavors) : Optional.empty();
return new HostSpec(object.field(hostSpecHostName).asString(),Collections.emptyList(), flavor, membership);
}
// Resolves the serialized flavor name through the NodeFlavors mapper, if one was provided.
private static Optional<Flavor> readFlavor(Inspector object, Optional<NodeFlavors> nodeFlavors) {
return nodeFlavors.map(flavorMapper -> flavorMapper.getFlavor(object.field(hostSpecFlavor).asString()))
.orElse(Optional.empty());
}
// Serializes this ProvisionInfo to JSON bytes via its Slime representation.
public byte[] toJson() throws IOException {
Slime slime = new Slime();
toSlime(slime.setObject());
return SlimeUtils.toJsonBytes(slime);
}
// Deserializes a ProvisionInfo previously produced by toJson().
public static ProvisionInfo fromJson(byte[] json, Optional<NodeFlavors> nodeFlavors) {
return fromSlime(SlimeUtils.jsonToSlime(json).get(), nodeFlavors);
}
// Returns a new ProvisionInfo with the union of both operands' hosts; neither is modified.
public ProvisionInfo merge(ProvisionInfo provisionInfo) {
Set<HostSpec> mergedSet = new LinkedHashSet<>();
mergedSet.addAll(this.hosts);
mergedSet.addAll(provisionInfo.getHosts());
return ProvisionInfo.withHosts(mergedSet);
}
} |
can replace `types.get(i)` with `node`. | public BLangNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) {
// Builds the tuple type node [T1, T2, ...rest] from the syntax-tree member list.
BLangTupleTypeNode tupleTypeNode = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode();
SeparatedNodeList<Node> types = tupleTypeDescriptorNode.memberTypeDesc();
for (int i = 0; i < types.size(); i++) {
    Node node = types.get(i);
    if (node.kind() == SyntaxKind.REST_TYPE) {
        // BUG FIX: the rest member is a RestDescriptorNode wrapper (`T...`); the type node must
        // be created from its inner type descriptor, not from the wrapper node itself.
        RestDescriptorNode restDescriptor = (RestDescriptorNode) node;
        tupleTypeNode.restParamType = createTypeNode(restDescriptor.typeDescriptor());
    } else {
        // Use the already-fetched member instead of re-indexing the list.
        tupleTypeNode.memberTypeNodes.add(createTypeNode(node));
    }
}
tupleTypeNode.pos = getPosition(tupleTypeDescriptorNode);
return tupleTypeNode;
} | tupleTypeNode.memberTypeNodes.add(createTypeNode(types.get(i))); | public BLangNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) {
// Builds the tuple type node [T1, T2, ...rest] from the syntax-tree member list.
BLangTupleTypeNode tupleTypeNode = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode();
SeparatedNodeList<Node> types = tupleTypeDescriptorNode.memberTypeDesc();
for (int i = 0; i < types.size(); i++) {
Node node = types.get(i);
if (node.kind() == SyntaxKind.REST_TYPE) {
// Rest member (`T...`): unwrap the RestDescriptorNode to its inner type descriptor.
RestDescriptorNode restDescriptor = (RestDescriptorNode) node;
tupleTypeNode.restParamType = createTypeNode(restDescriptor.typeDescriptor());
} else {
// Ordinary member type.
tupleTypeNode.memberTypeNodes.add(createTypeNode(node));
}
}
tupleTypeNode.pos = getPosition(tupleTypeDescriptorNode);
return tupleTypeNode;
} | class BLangNodeTransformer extends NodeTransformer<BLangNode> {
// Prefix marking quoted identifiers in Ballerina source ('ident).
private static final String IDENTIFIER_LITERAL_PREFIX = "'";
// Diagnostic logger for reporting errors during transformation.
private BLangDiagnosticLogHelper dlog;
private SymbolTable symTable;
// Source file this transformer is bound to; used for all DiagnosticPos values.
private BDiagnosticSource diagnosticSource;
private static final Pattern UNICODE_PATTERN = Pattern.compile(Constants.UNICODE_REGEX);
// Generates names for anonymous functions, services and service types.
private BLangAnonymousModelHelper anonymousModelHelper;
/* To keep track of additional top-level nodes produced from multi-BLangNode resultant transformations */
private Stack<TopLevelNode> additionalTopLevelNodes = new Stack<>();
/* To keep track of additional statements produced from multi-BLangNode resultant transformations */
private Stack<BLangStatement> additionalStatements = new Stack<>();
// Creates a transformer bound to one compilation unit's diagnostic source; all helpers are
// resolved from the shared CompilerContext.
public BLangNodeTransformer(CompilerContext context, BDiagnosticSource diagnosticSource) {
this.dlog = BLangDiagnosticLogHelper.getInstance(context);
this.symTable = SymbolTable.getInstance(context);
this.diagnosticSource = diagnosticSource;
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
}
// Entry point: transforms `node` and returns any extra top-level nodes / statements pushed
// onto the side stacks during the transformation, followed by the transformed node itself.
// The stacks are drained LIFO, so extras come out in reverse push order.
public List<org.ballerinalang.model.tree.Node> accept(Node node) {
BLangNode bLangNode = node.apply(this);
List<org.ballerinalang.model.tree.Node> nodes = new ArrayList<>();
while (!additionalTopLevelNodes.empty()) {
nodes.add(additionalTopLevelNodes.pop());
}
while (!additionalStatements.empty()) {
nodes.add(additionalStatements.pop());
}
nodes.add(bLangNode);
return nodes;
}
// An identifier token maps directly to a BLangIdentifier.
@Override
public BLangNode transform(IdentifierToken identifierToken) {
return this.createIdentifier(getPosition(identifierToken), identifierToken.text());
}
// Converts a syntax-tree line range (0-based) to a DiagnosticPos (1-based lines/columns).
// Returns null for a null node so optional children can be passed through unchecked.
private DiagnosticPos getPosition(Node node) {
if (node == null) {
return null;
}
LineRange lineRange = node.lineRange();
LinePosition startPos = lineRange.startLine();
LinePosition endPos = lineRange.endLine();
return new DiagnosticPos(diagnosticSource, startPos.line() + 1, endPos.line() + 1,
startPos.offset() + 1, endPos.offset() + 1);
}
// Transforms the whole module part into a compilation unit: imports first, then members,
// then any extra top-level nodes produced as side effects of member transformations.
@Override
public BLangNode transform(ModulePartNode modulePart) {
BLangCompilationUnit compilationUnit = (BLangCompilationUnit) TreeBuilder.createCompilationUnit();
compilationUnit.name = diagnosticSource.cUnitName;
DiagnosticPos pos = getPosition(modulePart);
for (ImportDeclarationNode importDecl : modulePart.imports()) {
BLangImportPackage bLangImport = (BLangImportPackage) importDecl.apply(this);
// Each import records which compilation unit it belongs to.
bLangImport.compUnit = this.createIdentifier(pos, compilationUnit.getName());
compilationUnit.addTopLevelNode(bLangImport);
}
for (ModuleMemberDeclarationNode member : modulePart.members()) {
compilationUnit.addTopLevelNode((TopLevelNode) member.apply(this));
}
// Drain nodes pushed by nested transformations (e.g. generated service type definitions).
while (!this.additionalTopLevelNodes.empty()) {
compilationUnit.addTopLevelNode(this.additionalTopLevelNodes.pop());
}
compilationUnit.pos = pos;
return compilationUnit;
}
// Module-level variable declaration -> simple variable (final when `final` keyword present).
@Override
public BLangNode transform(ModuleVariableDeclarationNode modVarDeclrNode) {
BLangSimpleVariable simpleVar = createSimpleVar(modVarDeclrNode.variableName(),
modVarDeclrNode.typeName(), modVarDeclrNode.initializer(),
modVarDeclrNode.finalKeyword().isPresent(), false, null);
simpleVar.pos = getPosition(modVarDeclrNode);
return simpleVar;
}
// Import declaration: org name, version and prefix are all optional. When no prefix is
// given, the last module-name component becomes the alias.
@Override
public BLangNode transform(ImportDeclarationNode importDeclaration) {
Node orgNameNode = importDeclaration.orgName().orElse(null);
Node versionNode = importDeclaration.version().orElse(null);
Node prefixNode = importDeclaration.prefix().orElse(null);
String orgName = null;
if (orgNameNode != null) {
ImportOrgNameNode importOrgName = (ImportOrgNameNode) orgNameNode;
orgName = importOrgName.orgName().text();
}
String version = null;
if (versionNode != null) {
version = ((ImportVersionNode) versionNode).versionNumber().toString();
}
String prefix = null;
if (prefixNode != null) {
// NOTE(review): toString() on the prefix token may include surrounding trivia —
// confirm whether .text() (as used for orgName above) is intended here.
prefix = ((ImportPrefixNode) prefixNode).prefix().toString();
}
List<BLangIdentifier> pkgNameComps = new ArrayList<>();
NodeList<IdentifierToken> names = importDeclaration.moduleName();
names.forEach(name -> pkgNameComps.add(this.createIdentifier(getPosition(name), name.text(), null)));
BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
importDcl.pos = getPosition(importDeclaration);
importDcl.pkgNameComps = pkgNameComps;
importDcl.orgName = this.createIdentifier(getPosition(orgNameNode), orgName);
importDcl.version = this.createIdentifier(getPosition(versionNode), version);
importDcl.alias = (prefix != null && !prefix.isEmpty()) ? this.createIdentifier(getPosition(prefixNode), prefix,
null) :
pkgNameComps.get(pkgNameComps.size() - 1);
return importDcl;
}
// Type definition: wraps the transformed type descriptor into a named, non-anonymous,
// module-level type definition, applying the optional `public` qualifier.
public BLangNode transform(TypeDefinitionNode typeDefNode) {
BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
BLangIdentifier identifierNode = this.createIdentifier(getPosition(typeDefNode.typeName()),
typeDefNode.typeName().text());
typeDef.setName(identifierNode);
// NOTE(review): this cast assumes the descriptor transforms to a structure (record/object)
// type node — confirm non-structural type descriptors cannot reach this path.
BLangStructureTypeNode structTypeNode = (BLangStructureTypeNode) typeDefNode.typeDescriptor().apply(this);
structTypeNode.isAnonymous = false;
structTypeNode.isLocal = false;
typeDef.typeNode = structTypeNode;
typeDefNode.visibilityQualifier().ifPresent(visibilityQual -> {
if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
typeDef.flagSet.add(Flag.PUBLIC);
}
});
typeDef.pos = getPosition(typeDefNode);
return typeDef;
}
// Union type `A|B`: combined via addUnionType (defined elsewhere), which merges the two sides.
@Override
public BLangNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {
BLangType rhsTypeNode = createTypeNode(unionTypeDescriptorNode.rightTypeDesc());
BLangType lhsTypeNode = createTypeNode(unionTypeDescriptorNode.leftTypeDesc());
return addUnionType(lhsTypeNode, rhsTypeNode, getPosition(unionTypeDescriptorNode));
}
// Parenthesised type `(T)`: same type node as T, marked as grouped.
@Override
public BLangNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) {
BLangType typeNode = createTypeNode(parenthesisedTypeDescriptorNode.typedesc());
typeNode.grouped = true;
return typeNode;
}
// `error` type descriptor -> BLangErrorType.
// BUG FIX: the annotation was duplicated (`@Override` twice), which is a compile error for
// non-repeatable annotations; keep a single occurrence.
// NOTE(review): any error type parameter on the descriptor is not transformed here — confirm
// whether that is intentional at this stage of the transformer.
@Override
public BLangNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) {
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    // Record the source position, consistent with the sibling transform(...) methods.
    errorType.pos = getPosition(errorTypeDescriptorNode);
    return errorType;
}
// Object type descriptor: maps qualifiers to flags, routes member functions (treating the
// user-defined init specially) and member fields into the object type node.
@Override
public BLangNode transform(ObjectTypeDescriptorNode objTypeDescNode) {
BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
for (Token qualifier : objTypeDescNode.objectTypeQualifiers()) {
if (qualifier.kind() == SyntaxKind.ABSTRACT_KEYWORD) {
objectTypeNode.flagSet.add(Flag.ABSTRACT);
}
if (qualifier.kind() == SyntaxKind.CLIENT_KEYWORD) {
objectTypeNode.flagSet.add(Flag.CLIENT);
}
if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
objectTypeNode.flagSet.add(Flag.SERVICE);
}
}
for (Node node : objTypeDescNode.members()) {
BLangNode bLangNode = node.apply(this);
if (bLangNode.getKind() == NodeKind.FUNCTION) {
BLangFunction bLangFunction = (BLangFunction) bLangNode;
bLangFunction.attachedFunction = true;
if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
// The user-defined init function is stored separately, not in the function list.
bLangFunction.objInitFunction = true;
objectTypeNode.initFunction = bLangFunction;
} else {
objectTypeNode.addFunction(bLangFunction);
}
} else if (bLangNode.getKind() == NodeKind.VARIABLE) {
objectTypeNode.addField((BLangSimpleVariable) bLangNode);
}
}
objectTypeNode.isAnonymous = false;
objectTypeNode.pos = getPosition(objTypeDescNode);
return objectTypeNode;
}
// Object field -> simple variable carrying the field's visibility qualifier.
@Override
public BLangNode transform(ObjectFieldNode objFieldNode) {
BLangSimpleVariable simpleVar = createSimpleVar(objFieldNode.fieldName(), objFieldNode.typeName(),
objFieldNode.expression(),
false, false, objFieldNode.visibilityQualifier());
simpleVar.pos = getPosition(objFieldNode);
return simpleVar;
}
// Named service declaration; delegates to createService with isAnonServiceValue = false.
@Override
public BLangNode transform(ServiceDeclarationNode serviceDeclrNode) {
return createService(serviceDeclrNode, serviceDeclrNode.serviceName(), false);
}
// Desugars a service declaration into several cooperating nodes:
//  - a BLangService (pushed as an additional top-level node),
//  - a generated service type definition wrapping the service body,
//  - a service-constructor expression, and
//  - for a named service, a final variable of the generated type initialized with that
//    expression (returned); for an anonymous service the type definition is returned instead.
private BLangNode createService(ServiceDeclarationNode serviceDeclrNode, IdentifierToken serviceNameNode,
boolean isAnonServiceValue) {
BLangService bLService = (BLangService) TreeBuilder.createServiceNode();
bLService.isAnonymousServiceValue = isAnonServiceValue;
DiagnosticPos pos = getPosition(serviceDeclrNode);
String serviceName;
DiagnosticPos identifierPos;
if (isAnonServiceValue) {
// Anonymous services get a generated variable name and use the declaration position.
serviceName = this.anonymousModelHelper.getNextAnonymousServiceVarKey(diagnosticSource.pkgID);
identifierPos = pos;
} else {
serviceName = serviceNameNode.text();
identifierPos = getPosition(serviceNameNode);
}
// The backing type is always generated, derived from the service name.
String serviceTypeName = this.anonymousModelHelper.getNextAnonymousServiceTypeKey(diagnosticSource.pkgID,
serviceName);
BLangIdentifier serviceVar = createIdentifier(identifierPos, serviceName);
serviceVar.pos = identifierPos;
bLService.setName(serviceVar);
// NOTE(review): empty branch — presumably a placeholder for named-service-only handling
// (e.g. attached expressions); confirm against the original implementation.
if (!isAnonServiceValue) {
}
this.additionalTopLevelNodes.add(bLService);
// Generated type definition holding the transformed service body as an object type.
BLangTypeDefinition bLTypeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
BLangIdentifier serviceTypeID = createIdentifier(identifierPos, serviceTypeName);
serviceTypeID.pos = pos;
bLTypeDef.setName(serviceTypeID);
bLTypeDef.flagSet.add(Flag.SERVICE);
bLTypeDef.typeNode = (BLangType) serviceDeclrNode.serviceBody().apply(this);
bLTypeDef.pos = pos;
bLService.serviceTypeDefinition = bLTypeDef;
// Expression that constructs the service value at runtime.
final BLangServiceConstructorExpr serviceConstNode = (BLangServiceConstructorExpr) TreeBuilder
.createServiceConstructorNode();
serviceConstNode.serviceNode = bLService;
serviceConstNode.pos = pos;
bLService.pos = pos;
if (!isAnonServiceValue) {
// Named service: bind the constructor expression to a final variable typed by the
// generated service type.
BLangSimpleVariable var = (BLangSimpleVariable) createBasicVarNodeWithoutType(identifierPos,
Collections.emptySet(),
serviceName, identifierPos,
serviceConstNode);
var.flagSet.add(Flag.FINAL);
var.flagSet.add(Flag.SERVICE);
BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
bLUserDefinedType.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
bLUserDefinedType.typeName = bLTypeDef.name;
bLUserDefinedType.pos = pos;
var.typeNode = bLUserDefinedType;
bLService.variableNode = var;
this.additionalTopLevelNodes.add(bLTypeDef);
return var;
} else {
return bLTypeDef;
}
}
// Service body -> object type node flagged SERVICE; resources become attached functions.
@Override
public BLangNode transform(ServiceBodyNode serviceBodyNode) {
BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
objectTypeNode.flagSet.add(Flag.SERVICE);
for (Node resourceNode : serviceBodyNode.resources()) {
BLangNode bLangNode = resourceNode.apply(this);
if (bLangNode.getKind() == NodeKind.FUNCTION) {
BLangFunction bLangFunction = (BLangFunction) bLangNode;
bLangFunction.attachedFunction = true;
objectTypeNode.addFunction(bLangFunction);
}
}
objectTypeNode.isAnonymous = false;
objectTypeNode.pos = getPosition(serviceBodyNode);
return objectTypeNode;
}
// Record type descriptor: routes plain fields, the rest descriptor and type references; the
// record is sealed exactly when no rest field was seen.
@Override
public BLangNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
boolean hasRestField = false;
for (Node field : recordTypeDescriptorNode.fields()) {
if (field.kind() == SyntaxKind.RECORD_FIELD || field.kind() == SyntaxKind.RECORD_FIELD_WITH_DEFAULT_VALUE) {
recordTypeNode.fields.add((BLangSimpleVariable) field.apply(this));
} else if (field.kind() == SyntaxKind.RECORD_REST_TYPE) {
recordTypeNode.restFieldType = (BLangValueType) field.apply(this);
hasRestField = true;
} else if (field.kind() == SyntaxKind.TYPE_REFERENCE) {
recordTypeNode.addTypeReference((BLangType) field.apply(this));
}
}
recordTypeNode.sealed = !hasRestField;
recordTypeNode.pos = getPosition(recordTypeDescriptorNode);
return recordTypeNode;
}
// `*TypeName;` type reference inside a record body -> the referenced type node.
@Override
public BLangNode transform(TypeReferenceNode typeReferenceNode) {
return createTypeNode(typeReferenceNode.typeName());
}
// Record field: OPTIONAL when declared with `?`, otherwise REQUIRED; always PUBLIC.
@Override
public BLangNode transform(RecordFieldNode recordFieldNode) {
BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName());
simpleVar.flagSet.add(Flag.PUBLIC);
if (recordFieldNode.questionMarkToken().isPresent()) {
simpleVar.flagSet.add(Flag.OPTIONAL);
} else {
simpleVar.flagSet.add(Flag.REQUIRED);
}
simpleVar.pos = getPosition(recordFieldNode);
return simpleVar;
}
// Record field with a default value: the default becomes the variable's initial expression.
@Override
public BLangNode transform(RecordFieldWithDefaultValueNode recordFieldNode) {
BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName());
simpleVar.flagSet.add(Flag.PUBLIC);
if (isPresent(recordFieldNode.expression())) {
simpleVar.setInitialExpression(createExpression(recordFieldNode.expression()));
}
simpleVar.pos = getPosition(recordFieldNode);
return simpleVar;
}
// Record rest descriptor (`T...;`) -> type node for T.
@Override
public BLangNode transform(RecordRestDescriptorNode recordFieldNode) {
return createTypeNode(recordFieldNode.typeName());
}
// Function definition: maps the visibility/remote/resource qualifier to a flag, fills in the
// signature, and handles bodiless (interface) functions and external bodies.
@Override
public BLangNode transform(FunctionDefinitionNode funcDefNode) {
BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
IdentifierToken funcName = funcDefNode.functionName();
bLFunction.name = createIdentifier(getPosition(funcName), funcName.text());
funcDefNode.visibilityQualifier().ifPresent(visibilityQual -> {
if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
bLFunction.flagSet.add(Flag.PUBLIC);
} else if (visibilityQual.kind() == SyntaxKind.PRIVATE_KEYWORD) {
bLFunction.flagSet.add(Flag.PRIVATE);
} else if (visibilityQual.kind() == SyntaxKind.REMOTE_KEYWORD) {
bLFunction.flagSet.add(Flag.REMOTE);
} else if (visibilityQual.kind() == SyntaxKind.RESOURCE_KEYWORD) {
bLFunction.flagSet.add(Flag.RESOURCE);
}
});
populateFuncSignature(bLFunction, funcDefNode.functionSignature());
if (funcDefNode.functionBody() == null) {
// No body: this is an interface (declaration-only) function.
bLFunction.body = null;
bLFunction.flagSet.add(Flag.INTERFACE);
bLFunction.interfaceFunction = true;
} else {
bLFunction.body = (BLangFunctionBody) funcDefNode.functionBody().apply(this);
if (bLFunction.body.getKind() == NodeKind.EXTERN_FUNCTION_BODY) {
bLFunction.flagSet.add(Flag.NATIVE);
}
}
bLFunction.pos = getPosition(funcDefNode);
return bLFunction;
}
// Explicit anonymous function: builds a generated-name LAMBDA/ANONYMOUS function, pushes it
// as an additional top-level node, and returns a lambda expression referencing it.
@Override
public BLangNode transform(ExplicitAnonymousFunctionExpressionNode anonFuncExprNode) {
BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
DiagnosticPos pos = getPosition(anonFuncExprNode);
bLFunction.name = createIdentifier(pos,
anonymousModelHelper.getNextAnonymousFunctionKey(diagnosticSource.pkgID));
populateFuncSignature(bLFunction, anonFuncExprNode.functionSignature());
bLFunction.body = (BLangFunctionBody) anonFuncExprNode.functionBody().apply(this);
bLFunction.pos = pos;
bLFunction.addFlag(Flag.LAMBDA);
bLFunction.addFlag(Flag.ANONYMOUS);
this.additionalTopLevelNodes.add(bLFunction);
BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaExpr.function = bLFunction;
lambdaExpr.pos = pos;
return lambdaExpr;
}
// Block function body: plain statements first; when a named-worker declarator is present,
// its init statements follow, then each worker declaration plus any statements the worker
// transformation pushed (the worker-start invocation) drained immediately after it.
@Override
public BLangNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
BLangBlockFunctionBody bLFuncBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
List<BLangStatement> statements = new ArrayList<>();
for (StatementNode statement : functionBodyBlockNode.statements()) {
if (statement != null) {
statements.add((BLangStatement) statement.apply(this));
}
}
if (functionBodyBlockNode.namedWorkerDeclarator().isPresent()) {
NamedWorkerDeclarator namedWorkerDeclarator = functionBodyBlockNode.namedWorkerDeclarator().get();
for (StatementNode statement : namedWorkerDeclarator.workerInitStatements()) {
statements.add((BLangStatement) statement.apply(this));
}
for (NamedWorkerDeclarationNode workerDeclarationNode : namedWorkerDeclarator.namedWorkerDeclarations()) {
statements.add((BLangStatement) workerDeclarationNode.apply(this));
// Pick up side-effect statements (e.g. the async start of the worker lambda).
while (!this.additionalStatements.empty()) {
statements.add(additionalStatements.pop());
}
}
}
bLFuncBody.stmts = statements;
bLFuncBody.pos = getPosition(functionBodyBlockNode);
return bLFuncBody;
}
// Named worker declaration `worker w { ... }` is desugared into:
//  1. an anonymous WORKER-flagged lambda function holding the worker body (pushed as an
//     additional top-level node),
//  2. a final variable definition binding that lambda (returned from this method), and
//  3. a second variable definition that starts the lambda asynchronously, pushed onto
//     additionalStatements for the enclosing body transformation to splice in.
@Override
public BLangNode transform(NamedWorkerDeclarationNode namedWorkerDeclNode) {
BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
DiagnosticPos pos = getPosition(namedWorkerDeclNode);
bLFunction.name = createIdentifier(pos,
anonymousModelHelper.getNextAnonymousFunctionKey(diagnosticSource.pkgID));
// The worker body's statements become the lambda's body.
BLangBlockStmt blockStmt = (BLangBlockStmt) namedWorkerDeclNode.workerBody().apply(this);
BLangBlockFunctionBody bodyNode = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
bodyNode.stmts = blockStmt.stmts;
bLFunction.body = bodyNode;
bLFunction.pos = pos;
bLFunction.addFlag(Flag.LAMBDA);
bLFunction.addFlag(Flag.ANONYMOUS);
bLFunction.addFlag(Flag.WORKER);
String workerName = namedWorkerDeclNode.workerName().text();
bLFunction.defaultWorkerName.value = workerName;
bLFunction.defaultWorkerName.pos = getPosition(namedWorkerDeclNode.workerName());
NodeList<AnnotationNode> annotations = namedWorkerDeclNode.annotations();
bLFunction.annAttachments = applyAll(annotations);
// Declared return type, or nil when the worker declares none.
Optional<Node> retNode = namedWorkerDeclNode.returnTypeDesc();
if (retNode.isPresent()) {
ReturnTypeDescriptorNode returnType = (ReturnTypeDescriptorNode) retNode.get();
bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
} else {
BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
bLValueType.pos = getPosition(namedWorkerDeclNode);
bLValueType.typeKind = TypeKind.NIL;
bLFunction.setReturnTypeNode(bLValueType);
}
this.additionalTopLevelNodes.add(bLFunction);
BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaExpr.function = bLFunction;
lambdaExpr.pos = pos;
// Variable holding the worker lambda, named with the worker-lambda prefix.
String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;
BLangSimpleVariable var = new SimpleVarBuilder()
.with(workerLambdaName)
.setExpression(lambdaExpr)
.isDeclaredWithVar()
.isFinal()
.build();
BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
DiagnosticPos workerNamePos = getPosition(namedWorkerDeclNode.workerName());
lamdaWrkr.pos = workerNamePos;
lamdaWrkr.setVariable(var);
lamdaWrkr.isWorker = true;
// Async invocation of the lambda variable: this is what actually starts the worker.
BLangInvocation bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
BLangIdentifier nameInd = this.createIdentifier(pos, workerLambdaName);
BLangNameReference reference = new BLangNameReference(workerNamePos, null, TreeBuilder.createIdentifierNode(),
nameInd);
bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
bLInvocation.name = (BLangIdentifier) reference.name;
bLInvocation.pos = workerNamePos;
bLInvocation.flagSet = new HashSet<>();
bLInvocation.annAttachments = bLFunction.annAttachments;
if (bLInvocation.getKind() == NodeKind.INVOCATION) {
bLInvocation.async = true;
} else {
dlog.error(pos, DiagnosticCode.START_REQUIRE_INVOCATION);
}
// Variable named after the worker, bound to the async invocation result.
BLangSimpleVariable invoc = new SimpleVarBuilder()
.with(workerName, getPosition(namedWorkerDeclNode.workerName()))
.isDeclaredWithVar()
.isWorkerVar()
.setExpression(bLInvocation)
.isFinal()
.build();
BLangSimpleVariableDef workerInvoc = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
workerInvoc.pos = workerNamePos;
workerInvoc.setVariable(invoc);
workerInvoc.isWorker = true;
this.additionalStatements.push(workerInvoc);
return lamdaWrkr;
}
// Transforms every node in the list and returns the results. The cast to A is unchecked;
// callers are responsible for matching A to what the transformation actually produces.
private <A extends BLangNode, B extends Node> List<A> applyAll(NodeList<B> annotations) {
ArrayList<A> annAttachments = new ArrayList<>();
for (B annotation : annotations) {
A blNode = (A) annotation.apply(this);
annAttachments.add(blNode);
}
return annAttachments;
}
// Annotation attachment: optional mapping-constructor value becomes the attachment's
// expression; the annotation reference supplies name and package alias.
@Override
public BLangNode transform(AnnotationNode annotation) {
Node name = annotation.annotReference();
BLangAnnotationAttachment bLAnnotationAttachment =
(BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();
if (annotation.annotValue().isPresent()) {
MappingConstructorExpressionNode map = annotation.annotValue().get();
BLangExpression bLExpression = (BLangExpression) map.apply(this);
bLAnnotationAttachment.setExpression(bLExpression);
}
BLangNameReference nameReference = createBLangNameReference(name);
bLAnnotationAttachment.setAnnotationName(nameReference.name);
bLAnnotationAttachment.setPackageAlias(nameReference.pkgAlias);
return bLAnnotationAttachment;
}
// `check`/`checkpanic` expression -> checked expression wrapping the inner expression.
@Override
public BLangNode transform(CheckExpressionNode checkExpressionNode) {
BLangCheckedExpr checkedExpr = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
checkedExpr.pos = getPosition(checkExpressionNode);
checkedExpr.expr = createExpression(checkExpressionNode.expression());
return checkedExpr;
}
// `expr is Type` test expression.
@Override
public BLangNode transform(TypeTestExpressionNode typeTestExpressionNode) {
BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
typeTestExpr.expr = createExpression(typeTestExpressionNode.expression());
typeTestExpr.typeNode = createTypeNode(typeTestExpressionNode.typeDescriptor());
typeTestExpr.pos = getPosition(typeTestExpressionNode);
return typeTestExpr;
}
// Mapping constructor `{ k: v, ...expr }`: spread fields become spread-operator fields,
// everything else becomes a key-value field with a non-computed key.
@Override
public BLangNode transform(MappingConstructorExpressionNode mapConstruct) {
BLangRecordLiteral bLiteralNode = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
for (MappingFieldNode field : mapConstruct.fields()) {
if (field.kind() == SyntaxKind.SPREAD_FIELD) {
SpreadFieldNode spreadFieldNode = (SpreadFieldNode) field;
BLangRecordSpreadOperatorField bLRecordSpreadOpField =
(BLangRecordSpreadOperatorField) TreeBuilder.createRecordSpreadOperatorField();
bLRecordSpreadOpField.expr = createExpression(spreadFieldNode.valueExpr());
bLiteralNode.fields.add(bLRecordSpreadOpField);
} else {
SpecificFieldNode specificField = (SpecificFieldNode) field;
BLangRecordKeyValueField bLRecordKeyValueField =
(BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
bLRecordKeyValueField.valueExpr = createExpression(specificField.valueExpr());
bLRecordKeyValueField.key = new BLangRecordLiteral.BLangRecordKey(
createExpression(specificField.fieldName()));
bLRecordKeyValueField.key.computedKey = false;
bLiteralNode.fields.add(bLRecordKeyValueField);
}
}
bLiteralNode.pos = getPosition(mapConstruct);
return bLiteralNode;
}
// List constructor `[e1, e2, ...]` -> list constructor expression with transformed members.
@Override
public BLangNode transform(ListConstructorExpressionNode listConstructorExprNode) {
List<BLangExpression> argExprList = new ArrayList<>();
BLangListConstructorExpr listConstructorExpr = (BLangListConstructorExpr)
TreeBuilder.createListConstructorExpressionNode();
for (Node expr : listConstructorExprNode.expressions()) {
argExprList.add(createExpression(expr));
}
listConstructorExpr.exprs = argExprList;
listConstructorExpr.pos = getPosition(listConstructorExprNode);
return listConstructorExpr;
}
// Unary expression: operator kind taken from the operator token's text.
@Override
public BLangNode transform(UnaryExpressionNode unaryExprNode) {
DiagnosticPos pos = getPosition(unaryExprNode);
OperatorKind operator = OperatorKind.valueFrom(unaryExprNode.unaryOperator().text());
BLangExpression expr = createExpression(unaryExprNode.expression());
return createBLangUnaryExpr(pos, operator, expr);
}
// `typeof expr` is modeled as a unary expression with the `typeof` operator.
@Override
public BLangNode transform(TypeofExpressionNode typeofExpressionNode) {
DiagnosticPos pos = getPosition(typeofExpressionNode);
OperatorKind operator = OperatorKind.valueFrom(typeofExpressionNode.typeofKeyword().text());
BLangExpression expr = createExpression(typeofExpressionNode.expression());
return createBLangUnaryExpr(pos, operator, expr);
}
// Binary expression: both operands transformed, operator derived from the operator token.
@Override
public BLangNode transform(BinaryExpressionNode binaryExprNode) {
BLangBinaryExpr bLBinaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
bLBinaryExpr.pos = getPosition(binaryExprNode);
bLBinaryExpr.lhsExpr = createExpression(binaryExprNode.lhsExpr());
bLBinaryExpr.rhsExpr = createExpression(binaryExprNode.rhsExpr());
bLBinaryExpr.opKind = OperatorKind.valueFrom(binaryExprNode.operator().text());
return bLBinaryExpr;
}
// Field access `expr.field` -> non-optional, single-field-kind field-based access.
@Override
public BLangNode transform(FieldAccessExpressionNode fieldAccessExprNode) {
BLangFieldBasedAccess bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
Token fieldName = fieldAccessExprNode.fieldName();
bLFieldBasedAccess.pos = getPosition(fieldAccessExprNode);
BLangNameReference nameRef = createBLangNameReference(fieldName);
bLFieldBasedAccess.field = createIdentifier(getPosition(fieldName), nameRef.name.getValue());
bLFieldBasedAccess.field.pos = getPosition(fieldAccessExprNode);
bLFieldBasedAccess.expr = createExpression(fieldAccessExprNode.expression());
bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
bLFieldBasedAccess.optionalFieldAccess = false;
return bLFieldBasedAccess;
}
// Braced expression `(expr)` contributes no node of its own; unwrap to the inner expression.
@Override
public BLangNode transform(BracedExpressionNode brcExprOut) {
return createExpression(brcExprOut.expression());
}
// Function call `f(args)` -> invocation node.
@Override
public BLangNode transform(FunctionCallExpressionNode functionCallNode) {
return createBLangInvocation(functionCallNode.functionName(), functionCallNode.arguments(),
getPosition(functionCallNode));
}
// Method call `expr.m(args)`: invocation plus the receiver expression.
public BLangNode transform(MethodCallExpressionNode methodCallExprNode) {
BLangInvocation bLInvocation = createBLangInvocation(methodCallExprNode.methodName(),
methodCallExprNode.arguments(),
getPosition(methodCallExprNode));
bLInvocation.expr = createExpression(methodCallExprNode.expression());
return bLInvocation;
}
@Override
public BLangNode transform(ImplicitNewExpressionNode implicitNewExprNode) {
BLangTypeInit initNode = createTypeInit(implicitNewExprNode);
BLangInvocation invocationNode = createInvocation(implicitNewExprNode, implicitNewExprNode.newKeyword());
initNode.argsExpr.addAll(invocationNode.argExprs);
initNode.initInvocation = invocationNode;
return initNode;
}
@Override
public BLangNode transform(ExplicitNewExpressionNode explicitNewExprNode) {
BLangTypeInit initNode = createTypeInit(explicitNewExprNode);
BLangInvocation invocationNode = createInvocation(explicitNewExprNode, explicitNewExprNode.newKeyword());
initNode.argsExpr.addAll(invocationNode.argExprs);
initNode.initInvocation = invocationNode;
return initNode;
}
private BLangTypeInit createTypeInit(NewExpressionNode expression) {
BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
initNode.pos = getPosition(expression);
if (expression.kind() == SyntaxKind.EXPLICIT_NEW_EXPRESSION) {
Node type = ((ExplicitNewExpressionNode) expression).typeDescriptor();
initNode.userDefinedType = createTypeNode(type);
}
return initNode;
}
private BLangInvocation createInvocation(NewExpressionNode expression, Token newKeyword) {
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = getPosition(expression);
populateArgsInvocation(expression, invocationNode);
BLangNameReference nameReference = createBLangNameReference(newKeyword);
invocationNode.name = (BLangIdentifier) nameReference.name;
invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
return invocationNode;
}
private void populateArgsInvocation(NewExpressionNode expression, BLangInvocation invocationNode) {
Iterator<FunctionArgumentNode> argumentsIter = getArgumentNodesIterator(expression);
if (argumentsIter != null) {
while (argumentsIter.hasNext()) {
BLangExpression argument = createExpression(argumentsIter.next());
invocationNode.argExprs.add(argument);
}
}
}
/**
 * Returns an iterator over the new-expression's arguments, or null when an
 * implicit new-expression carries no parenthesized argument list at all.
 */
private Iterator<FunctionArgumentNode> getArgumentNodesIterator(NewExpressionNode expression) {
    if (expression.kind() != SyntaxKind.IMPLICIT_NEW_EXPRESSION) {
        // Explicit new-expressions always have an argument list.
        ParenthesizedArgList argList =
                (ParenthesizedArgList) ((ExplicitNewExpressionNode) expression).parenthesizedArgList();
        return argList.arguments().iterator();
    }
    Optional<ParenthesizedArgList> argsList = ((ImplicitNewExpressionNode) expression).parenthesizedArgList();
    return argsList.isPresent() ? argsList.get().arguments().iterator() : null;
}
/**
 * Transforms an index access {@code container[key]} into a
 * {@link BLangIndexBasedAccess} node.
 */
@Override
public BLangIndexBasedAccess transform(IndexedExpressionNode indexedExpressionNode) {
    BLangIndexBasedAccess indexAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    indexAccess.pos = getPosition(indexedExpressionNode);
    // Key first, then container — matching the original transformation order.
    indexAccess.indexExpr = createExpression(indexedExpressionNode.keyExpression());
    indexAccess.expr = createExpression(indexedExpressionNode.containerExpression());
    return indexAccess;
}
/**
 * Transforms a type-cast expression {@code <T> expr} into a
 * {@link BLangTypeConversionExpr}. The target type is taken from the cast
 * parameter when present; otherwise only the inner expression is set.
 */
@Override
public BLangTypeConversionExpr transform(TypeCastExpressionNode typeCastExpressionNode) {
BLangTypeConversionExpr typeConversionNode = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
typeConversionNode.pos = getPosition(typeCastExpressionNode);
// typeCastParam() may be absent (null) for incomplete/recovered sources.
if (typeCastExpressionNode.typeCastParam() != null) {
typeConversionNode.typeNode = createTypeNode(typeCastExpressionNode.typeCastParam().type());
}
typeConversionNode.expr = createExpression(typeCastExpressionNode.expression());
return typeConversionNode;
}
/**
 * Transforms a bare token. Only XML text content and template-string tokens
 * are meaningful here; anything else indicates an unsupported construct.
 *
 * @throws RuntimeException for any other token kind
 */
@Override
public BLangNode transform(Token token) {
    SyntaxKind kind = token.kind();
    if (kind == SyntaxKind.XML_TEXT_CONTENT || kind == SyntaxKind.TEMPLATE_STRING) {
        return createSimpleLiteral(token);
    }
    throw new RuntimeException("Syntax kind is not supported: " + kind);
}
/**
 * Transforms a template interpolation {@code ${...}} by converting its inner
 * expression; the interpolation wrapper itself has no AST counterpart.
 */
@Override
public BLangNode transform(InterpolationNode interpolationNode) {
return createExpression(interpolationNode.expression());
}
/**
 * Transforms a template expression. XML templates delegate to their single
 * content node; string templates become a string-template literal.
 *
 * @throws RuntimeException for any other template kind
 */
@Override
public BLangNode transform(TemplateExpressionNode expressionNode) {
    SyntaxKind kind = expressionNode.kind();
    if (kind == SyntaxKind.XML_TEMPLATE_EXPRESSION) {
        return expressionNode.content().get(0).apply(this);
    }
    if (kind == SyntaxKind.STRING_TEMPLATE_EXPRESSION) {
        return createStringTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
    }
    throw new RuntimeException("Syntax kind is not supported: " + kind);
}
/**
 * Transforms a return statement. A bare {@code return;} is normalized to
 * returning an explicit nil literal.
 */
@Override
public BLangNode transform(ReturnStatementNode returnStmtNode) {
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = getPosition(returnStmtNode);
    if (returnStmtNode.expression().isPresent()) {
        returnNode.expr = createExpression(returnStmtNode.expression().get());
        return returnNode;
    }
    // No expression: synthesize a nil literal so downstream phases always see an expr.
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.pos = getPosition(returnStmtNode);
    nilLiteral.value = Names.NIL_VALUE;
    nilLiteral.type = symTable.nilType;
    returnNode.expr = nilLiteral;
    return returnNode;
}
/**
 * Transforms a {@code panic expr;} statement into a {@link BLangPanic} node.
 */
@Override
public BLangNode transform(PanicStatementNode panicStmtNode) {
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = getPosition(panicStmtNode);
    panicNode.expr = createExpression(panicStmtNode.expression());
    return panicNode;
}
/**
 * Transforms a {@code continue;} statement into a {@link BLangContinue} node.
 */
@Override
public BLangNode transform(ContinueStatementNode continueStmtNode) {
    BLangContinue continueNode = (BLangContinue) TreeBuilder.createContinueNode();
    continueNode.pos = getPosition(continueStmtNode);
    return continueNode;
}
/**
 * Transforms a {@code break;} statement into a {@link BLangBreak} node.
 */
@Override
public BLangNode transform(BreakStatementNode breakStmtNode) {
    BLangBreak breakNode = (BLangBreak) TreeBuilder.createBreakNode();
    breakNode.pos = getPosition(breakStmtNode);
    return breakNode;
}
/**
 * Transforms an assignment statement. The LHS is validated first so that an
 * invocation used as an assignment target is reported as an error.
 */
@Override
public BLangNode transform(AssignmentStatementNode assignmentStmtNode) {
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    BLangExpression lhs = createExpression(assignmentStmtNode.varRef());
    // Invocations are not valid lvalues; report before building the RHS.
    validateLvexpr(lhs, DiagnosticCode.INVALID_INVOCATION_LVALUE_ASSIGNMENT);
    assignment.setExpression(createExpression(assignmentStmtNode.expression()));
    assignment.pos = getPosition(assignmentStmtNode);
    assignment.varRef = lhs;
    return assignment;
}
/**
 * Transforms a compound assignment (e.g. {@code x += y}) into a
 * {@link BLangCompoundAssignment}, deriving the operator kind from the
 * binary-operator token text.
 */
@Override
public BLangNode transform(CompoundAssignmentStatementNode compoundAssignmentStmtNode) {
    BLangCompoundAssignment compoundAssignment =
            (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    compoundAssignment.setExpression(createExpression(compoundAssignmentStmtNode.rhsExpression()));
    BLangVariableReference target =
            (BLangVariableReference) createExpression(compoundAssignmentStmtNode.lhsExpression());
    compoundAssignment.setVariable(target);
    compoundAssignment.pos = getPosition(compoundAssignmentStmtNode);
    compoundAssignment.opKind = OperatorKind.valueFrom(compoundAssignmentStmtNode.binaryOperator().text());
    return compoundAssignment;
}
/**
 * Recursively validates an assignment target: a direct invocation is an
 * error, and field/index accesses are checked on their base expression.
 * (The node kinds are mutually exclusive, so an else-if chain is equivalent.)
 */
private void validateLvexpr(ExpressionNode lExprNode, DiagnosticCode errorCode) {
    NodeKind kind = lExprNode.getKind();
    if (kind == NodeKind.INVOCATION) {
        dlog.error(((BLangInvocation) lExprNode).pos, errorCode);
    } else if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        validateLvexpr(((BLangAccessExpression) lExprNode).expr, errorCode);
    }
}
/**
 * Transforms a while statement: condition plus body block.
 */
@Override
public BLangNode transform(WhileStatementNode whileStmtNode) {
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.setCondition(createExpression(whileStmtNode.condition()));
    whileNode.pos = getPosition(whileStmtNode);
    BLangBlockStmt body = (BLangBlockStmt) whileStmtNode.whileBody().apply(this);
    // The block's position is that of the body, not of the whole statement.
    body.pos = getPosition(whileStmtNode.whileBody());
    whileNode.setBody(body);
    return whileNode;
}
/**
 * Transforms an if/else statement. The optional else body (either a block or
 * a chained if) is attached as the else statement when present.
 */
@Override
public BLangNode transform(IfElseStatementNode ifElseStmtNode) {
    BLangIf ifNode = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifNode.pos = getPosition(ifElseStmtNode);
    ifNode.setCondition(createExpression(ifElseStmtNode.condition()));
    ifNode.setBody((BLangBlockStmt) ifElseStmtNode.ifBody().apply(this));
    if (ifElseStmtNode.elseBody().isPresent()) {
        ElseBlockNode elseNode = (ElseBlockNode) ifElseStmtNode.elseBody().get();
        ifNode.setElseStatement(
                (org.ballerinalang.model.tree.statements.StatementNode) elseNode.elseBody().apply(this));
    }
    return ifNode;
}
/**
 * Transforms a block statement by converting each contained statement in
 * order. Null statement slots (from recovered sources) are skipped.
 */
@Override
public BLangNode transform(BlockStatementNode blockStatement) {
    BLangBlockStmt blockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    List<BLangStatement> converted = new ArrayList<>();
    for (StatementNode statement : blockStatement.statements()) {
        if (statement == null) {
            continue;
        }
        converted.add((BLangStatement) statement.apply(this));
    }
    blockStmt.stmts = converted;
    return blockStmt;
}
/**
 * Transforms a local variable declaration into a simple variable definition,
 * carrying over name, type, optional initializer and the final modifier.
 */
@Override
public BLangNode transform(VariableDeclarationNode varDeclaration) {
    BLangSimpleVariableDef varDef = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    varDef.pos = getPosition(varDeclaration);
    Token varName = varDeclaration.variableName();
    BLangSimpleVariable variable = new SimpleVarBuilder()
            .with(varName.text(), getPosition(varName))
            .setTypeByNode(varDeclaration.typeName())
            .setExpressionByNode(varDeclaration.initializer().orElse(null))
            .setFinal(varDeclaration.finalKeyword().isPresent())
            .build();
    varDef.setVariable(variable);
    return varDef;
}
/**
 * Transforms an expression statement by wrapping the converted expression.
 */
@Override
public BLangNode transform(ExpressionStatementNode expressionStatement) {
    BLangExpressionStmt exprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode();
    exprStmt.pos = getPosition(expressionStatement);
    exprStmt.expr = (BLangExpression) expressionStatement.expression().apply(this);
    return exprStmt;
}
/**
 * Transforms a positional argument: the argument contributes its inner
 * expression directly, with no wrapper node.
 */
@Override
public BLangNode transform(PositionalArgumentNode argumentNode) {
return createExpression(argumentNode.expression());
}
/**
 * Transforms a named argument {@code name = expr} into a
 * {@link BLangNamedArgsExpression}.
 */
@Override
public BLangNode transform(NamedArgumentNode namedArgumentNode) {
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.name = this.createIdentifier(getPosition(namedArgumentNode.argumentName()),
            namedArgumentNode.argumentName().name().text());
    namedArg.expr = createExpression(namedArgumentNode.expression());
    namedArg.pos = getPosition(namedArgumentNode);
    return namedArg;
}
/**
 * Transforms a rest argument {@code ...expr} by delegating to its inner
 * expression; the spread itself has no dedicated AST node here.
 */
@Override
public BLangNode transform(RestArgumentNode restArgumentNode) {
return restArgumentNode.expression().apply(this);
}
/**
 * Transforms a required parameter, applying the {@code public} flag when the
 * visibility qualifier is present.
 */
@Override
public BLangNode transform(RequiredParameterNode requiredParameter) {
    BLangSimpleVariable param = createSimpleVar(requiredParameter.paramName(),
            requiredParameter.typeName());
    requiredParameter.visibilityQualifier().ifPresent(qualifier -> {
        if (qualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            param.flagSet.add(Flag.PUBLIC);
        }
    });
    param.pos = getPosition(requiredParameter);
    return param;
}
/**
 * Transforms a defaultable parameter, attaching its default-value expression
 * and applying the {@code public} flag when the qualifier is present.
 */
@Override
public BLangNode transform(DefaultableParameterNode defaultableParameter) {
    BLangSimpleVariable param = createSimpleVar(defaultableParameter.paramName(),
            defaultableParameter.typeName());
    defaultableParameter.visibilityQualifier().ifPresent(qualifier -> {
        if (qualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            param.flagSet.add(Flag.PUBLIC);
        }
    });
    param.setInitialExpression(createExpression(defaultableParameter.expression()));
    param.pos = getPosition(defaultableParameter);
    return param;
}
/**
 * Transforms a rest parameter {@code T... x}, modelling its type as a
 * single-dimension array of the declared element type.
 */
@Override
public BLangNode transform(RestParameterNode restParameter) {
    BLangSimpleVariable restVar = createSimpleVar(restParameter.paramName(), restParameter.typeName());
    BLangArrayType arrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    arrayType.pos = getPosition(restParameter.typeName());
    arrayType.elemtype = restVar.typeNode;
    arrayType.dimensions = 1;
    restVar.typeNode = arrayType;
    restVar.pos = getPosition(restParameter);
    return restVar;
}
/**
 * Transforms an optional type {@code T?} into the union {@code T|()}.
 */
@Override
public BLangNode transform(OptionalTypeDescriptorNode optTypeDescriptor) {
    BLangValueType nilType = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilType.typeKind = TypeKind.NIL;
    nilType.pos = getPosition(optTypeDescriptor.questionMarkToken());
    BLangUnionTypeNode unionType = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    unionType.pos = getPosition(optTypeDescriptor);
    unionType.memberTypeNodes.add(createTypeNode(optTypeDescriptor.typeDescriptor()));
    unionType.memberTypeNodes.add(nilType);
    return unionType;
}
/**
 * Transforms a parameterized type such as {@code map<T>} into a constrained
 * type: a built-in reference type plus the constraint type node.
 */
@Override
public BLangNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) {
    BLangValueType builtInType = createBuiltInTypeNode(parameterizedTypeDescNode.parameterizedType());
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = builtInType.typeKind;
    refType.pos = builtInType.pos;
    BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
    constrainedType.pos = getPosition(parameterizedTypeDescNode);
    constrainedType.type = refType;
    constrainedType.constraint = createTypeNode(parameterizedTypeDescNode.typeNode());
    return constrainedType;
}
/**
 * Transforms a simple name reference into a variable reference with an empty
 * package alias.
 */
@Override
public BLangNode transform(SimpleNameReferenceNode simpleNameRefNode) {
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(simpleNameRefNode);
    varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    varRef.variableName = createIdentifier(getPosition(simpleNameRefNode.name()), simpleNameRefNode.name().text());
    return varRef;
}
/**
 * Transforms a qualified name reference {@code prefix:identifier} into a
 * variable reference whose package alias is the module prefix.
 */
@Override
public BLangNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(qualifiedNameReferenceNode);
    varRef.pkgAlias = createIdentifier(getPosition(qualifiedNameReferenceNode.modulePrefix()),
            qualifiedNameReferenceNode.modulePrefix().text());
    varRef.variableName = createIdentifier(getPosition(qualifiedNameReferenceNode.identifier()),
            qualifiedNameReferenceNode.identifier().text());
    return varRef;
}
/**
 * Transforms an XML processing instruction {@code <?target data?>} into a
 * {@link BLangXMLProcInsLiteral} with its data fragments and target literal.
 */
@Override
public BLangNode transform(XMLProcessingInstruction xmlProcessingInstruction) {
    BLangXMLProcInsLiteral procInsLiteral =
            (BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode();
    for (Node dataNode : xmlProcessingInstruction.data()) {
        procInsLiteral.dataFragments.add(createExpression(dataNode));
    }
    procInsLiteral.target = (BLangLiteral) xmlProcessingInstruction.target().apply(this);
    return procInsLiteral;
}
/**
 * Transforms an XML simple name into a string literal carrying the name text.
 */
@Override
public BLangNode transform(XMLSimpleNameNode xmlSimpleNameNode) {
    BLangLiteral nameLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nameLiteral.pos = getPosition(xmlSimpleNameNode);
    nameLiteral.type = symTable.getTypeFromTag(TypeTags.STRING);
    String nameText = xmlSimpleNameNode.name().text();
    nameLiteral.value = nameText;
    nameLiteral.originalValue = nameText;
    return nameLiteral;
}
/**
 * Transforms an XML comment into a {@link BLangXMLCommentLiteral}, collecting
 * each content node as a text fragment.
 */
@Override
public BLangNode transform(XMLComment xmlComment) {
    BLangXMLCommentLiteral commentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode();
    commentLiteral.pos = getPosition(xmlComment);
    for (Node contentNode : xmlComment.content()) {
        commentLiteral.textFragments.add((BLangExpression) contentNode.apply(this));
    }
    return commentLiteral;
}
/**
 * Transforms an XML element into a {@link BLangXMLElementLiteral}: start/end
 * tag names plus children, where plain character data becomes a simple
 * literal and all other content goes through the expression builder.
 */
@Override
public BLangNode transform(XMLElementNode xmlElementNode) {
    BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    xmlElement.startTagName = createExpression(xmlElementNode.startTag());
    xmlElement.endTagName = createExpression(xmlElementNode.endTag());
    for (Node child : xmlElementNode.content()) {
        if (child.kind() == SyntaxKind.XML_TEXT) {
            xmlElement.children.add(createSimpleLiteral(((XMLTextNode) child).content()));
        } else {
            xmlElement.children.add(createExpression(child));
        }
    }
    xmlElement.pos = getPosition(xmlElementNode);
    xmlElement.isRoot = true;
    return xmlElement;
}
/**
 * Transforms an XML start tag into a qualified-name node for the tag name.
 */
@Override
public BLangNode transform(XMLStartTagNode startTagNode) {
return createStartEndXMLTag(startTagNode.name(), getPosition(startTagNode));
}
/**
 * Transforms an XML end tag into a qualified-name node for the tag name.
 */
@Override
public BLangNode transform(XMLEndTagNode endTagNode) {
return createStartEndXMLTag(endTagNode.name(), getPosition(endTagNode));
}
/**
 * Transforms XML character data into a {@link BLangXMLTextLiteral} whose
 * first fragment is the converted content token.
 */
@Override
public BLangNode transform(XMLTextNode xmlTextNode) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.pos = getPosition(xmlTextNode);
    BLangExpression content = (BLangExpression) xmlTextNode.content().apply(this);
    textLiteral.textFragments.add(0, content);
    return textLiteral;
}
/**
 * Fallback for syntax nodes without a dedicated transform overload; reaching
 * here means the construct is not yet supported by this transformer.
 */
@Override
protected BLangNode transformSyntaxNode(Node node) {
throw new RuntimeException("Node not supported: " + node.getClass().getSimpleName());
}
/**
 * Populates a function node from its signature: parameters (the rest
 * parameter is attached separately) and the return type, which defaults to
 * nil when no return-type descriptor is present.
 */
private void populateFuncSignature(BLangFunction bLFunction, FunctionSignatureNode funcSignature) {
    for (ParameterNode child : funcSignature.parameters()) {
        SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
        if (child instanceof RestParameterNode) {
            bLFunction.setRestParameter(param);
        } else {
            bLFunction.addParameter(param);
        }
    }
    Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
    if (retNode.isPresent()) {
        bLFunction.setReturnTypeNode(createTypeNode(retNode.get().type()));
        return;
    }
    // No explicit return type: functions implicitly return nil.
    BLangValueType nilReturn = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilReturn.pos = getPosition(funcSignature);
    nilReturn.typeKind = TypeKind.NIL;
    bLFunction.setReturnTypeNode(nilReturn);
}
/**
 * Builds a unary expression node from a position, operator and operand.
 */
private BLangUnaryExpr createBLangUnaryExpr(DiagnosticPos pos, OperatorKind operatorKind, BLangExpression expr) {
    BLangUnaryExpr unaryExpr = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
    unaryExpr.operator = operatorKind;
    unaryExpr.expr = expr;
    unaryExpr.pos = pos;
    return unaryExpr;
}
/**
 * Builds a {@link BLangXMLQName} for an XML start/end tag name.
 * Qualified names ({@code prefix:name}) keep both parts; simple names get an
 * empty prefix identifier.
 *
 * @throws RuntimeException for any other name-node kind
 */
private BLangXMLQName createStartEndXMLTag(XMLNameNode xmlNameNode, DiagnosticPos pos) {
BLangXMLQName xmlName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
SyntaxKind kind = xmlNameNode.kind();
xmlName.pos = pos;
switch (kind) {
case XML_QUALIFIED_NAME:
// NOTE(review): toString() is used here (vs text() below) — it may include
// trivia from the syntax node; confirm this is intentional.
xmlName.localname = createIdentifier(getPosition(((XMLQualifiedNameNode) xmlNameNode).name()),
((XMLQualifiedNameNode) xmlNameNode).name().toString());
xmlName.prefix = createIdentifier(getPosition(((XMLQualifiedNameNode) xmlNameNode).prefix()),
((XMLQualifiedNameNode) xmlNameNode).prefix().toString());
return xmlName;
case XML_SIMPLE_NAME:
xmlName.localname = createIdentifier(getPosition(((XMLSimpleNameNode) xmlNameNode).name()),
((XMLSimpleNameNode) xmlNameNode).name().text());
xmlName.prefix = createIdentifier(null, "");
return xmlName;
default:
throw new RuntimeException("Syntax kind is not supported: " + kind);
}
}
/**
 * Central expression dispatcher. Simple literals and name references are
 * built directly; braced expressions become group expressions; everything
 * else is delegated to the node's own transform overload. The branch order
 * matters: literal kinds must be checked before the generic fallback.
 */
private BLangExpression createExpression(Node expression) {
if (isSimpleLiteral(expression.kind())) {
return createSimpleLiteral(expression);
} else if (expression.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE ||
expression.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
expression.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
// Name references are converted to simple variable references here rather
// than via their transform overloads.
BLangNameReference nameReference = createBLangNameReference(expression);
BLangSimpleVarRef bLVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
bLVarRef.pos = getPosition(expression);
bLVarRef.pkgAlias = this.createIdentifier((DiagnosticPos) nameReference.pkgAlias.getPosition(),
nameReference.pkgAlias.getValue());
bLVarRef.variableName = this.createIdentifier((DiagnosticPos) nameReference.name.getPosition(),
nameReference.name.getValue());
return bLVarRef;
} else if (expression.kind() == SyntaxKind.BRACED_EXPRESSION) {
// (expr) — wrap in a group expression to preserve grouping.
BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
group.expression = (BLangExpression) expression.apply(this);
return group;
} else {
return (BLangExpression) expression.apply(this);
}
}
/**
 * Builds a string-template literal from the template's member nodes, each
 * converted to an expression fragment in order.
 */
private BLangNode createStringTemplateLiteral(NodeList<TemplateMemberNode> memberNodes, DiagnosticPos pos) {
    BLangStringTemplateLiteral templateLiteral =
            (BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
    templateLiteral.pos = pos;
    for (TemplateMemberNode member : memberNodes) {
        templateLiteral.exprs.add((BLangExpression) member.apply(this));
    }
    return templateLiteral;
}
/**
 * Convenience overload: a plain variable with no initializer, no modifiers
 * and no visibility qualifier.
 */
private BLangSimpleVariable createSimpleVar(Token name, Node type) {
return createSimpleVar(name, type, null, false, false, null);
}
/**
 * Overload taking the name as a token; extracts its text and position and
 * delegates to the string-based variant.
 */
private BLangSimpleVariable createSimpleVar(Token name, Node typeName, Node initializer, boolean isFinal,
boolean isListenerVar,
Token visibilityQualifier) {
return createSimpleVar(name.text(), getPosition(name), typeName, initializer, isFinal, isListenerVar,
visibilityQualifier);
}
/**
 * Creates a {@link BLangSimpleVariable} with the given name, type,
 * initializer and modifiers.
 *
 * @param name                variable name text
 * @param pos                 position of the name
 * @param typeName            declared type node; null or {@code var} means inferred
 * @param initializer         initial-value expression node, may be null
 * @param isFinal             marks the variable (and nested members) final
 * @param isListenerVar       marks a listener declaration (implicitly final)
 * @param visibilityQualifier public/private token, may be null
 */
private BLangSimpleVariable createSimpleVar(String name, DiagnosticPos pos, Node typeName, Node initializer,
                                            boolean isFinal,
                                            boolean isListenerVar,
                                            Token visibilityQualifier) {
    BLangSimpleVariable variable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    variable.setName(this.createIdentifier(pos, name));
    // A missing type or the `var` keyword means the type is inferred.
    if (typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC) {
        variable.isDeclaredWithVar = true;
    } else {
        variable.setTypeNode(createTypeNode(typeName));
    }
    if (visibilityQualifier != null) {
        SyntaxKind qualifierKind = visibilityQualifier.kind();
        if (qualifierKind == SyntaxKind.PRIVATE_KEYWORD) {
            variable.flagSet.add(Flag.PRIVATE);
        } else if (qualifierKind == SyntaxKind.PUBLIC_KEYWORD) {
            variable.flagSet.add(Flag.PUBLIC);
        }
    }
    if (isFinal) {
        markVariableAsFinal(variable);
    }
    if (initializer != null) {
        variable.setInitialExpression(createExpression(initializer));
    }
    if (isListenerVar) {
        // Listener declarations are implicitly final.
        variable.flagSet.add(Flag.LISTENER);
        variable.flagSet.add(Flag.FINAL);
    }
    return variable;
}
/**
 * Convenience overload without whitespace information.
 */
private BLangIdentifier createIdentifier(DiagnosticPos pos, String value) {
return createIdentifier(pos, value, null);
}
/**
 * Creates a {@link BLangIdentifier} for the given text. Quoted identifier
 * literals (prefixed with the identifier-literal marker) are validated to be
 * alphanumeric, unescaped, and stored without the prefix with the literal
 * flag set; plain identifiers are stored as-is.
 *
 * @param pos   source position (may be null)
 * @param value raw identifier text (may be null, yielding an empty node)
 * @param ws    optional whitespace to attach
 */
private BLangIdentifier createIdentifier(DiagnosticPos pos, String value, Set<Whitespace> ws) {
BLangIdentifier bLIdentifer = (BLangIdentifier) TreeBuilder.createIdentifierNode();
if (value == null) {
return bLIdentifer;
}
if (value.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
// Quoted identifiers may only contain alphanumerics and dots.
if (!escapeQuotedIdentifier(value).matches("^[0-9a-zA-Z.]*$")) {
dlog.error(pos, DiagnosticCode.IDENTIFIER_LITERAL_ONLY_SUPPORTS_ALPHANUMERICS);
}
// Unescape Java-style escapes, then drop the leading quote marker.
String unescapedValue = StringEscapeUtils.unescapeJava(value);
bLIdentifer.setValue(unescapedValue.substring(1));
bLIdentifer.originalValue = value;
bLIdentifer.setLiteral(true);
} else {
bLIdentifer.setValue(value);
bLIdentifer.setLiteral(false);
}
bLIdentifer.pos = pos;
if (ws != null) {
bLIdentifer.addWS(ws);
}
return bLIdentifer;
}
/**
 * Creates a {@link BLangLiteral} (or {@link BLangNumericLiteral} for numeric
 * kinds) from a basic-literal node or token, decoding the literal text into
 * its runtime value and recording the original source text.
 *
 * NOTE(review): if the node kind matches none of the branches below, typeTag
 * stays -1 and is passed to symTable.getTypeFromTag(-1) — behavior for that
 * case is not visible here; confirm it cannot occur for inputs reaching this
 * method.
 */
private BLangLiteral createSimpleLiteral(Node literal) {
BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
SyntaxKind type = literal.kind();
int typeTag = -1;
Object value = null;
String originalValue = null;
String textValue;
// Extract the raw text from either a basic-literal node or a bare token.
if (literal instanceof BasicLiteralNode) {
textValue = ((BasicLiteralNode) literal).literalToken().text();
} else if (literal instanceof Token) {
textValue = ((Token) literal).text();
} else {
textValue = "";
}
if (type == SyntaxKind.DECIMAL_INTEGER_LITERAL || type == SyntaxKind.HEX_INTEGER_LITERAL) {
// Integer literals: parsed (with range diagnostics) by getIntegerLiteral.
typeTag = TypeTags.INT;
value = getIntegerLiteral(literal, textValue);
originalValue = textValue;
bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
} else if (type == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) {
// A decimal-discriminated literal (e.g. `1.0d`) is typed decimal, else float.
typeTag = NumericLiteralSupport.isDecimalDiscriminated(textValue) ? TypeTags.DECIMAL : TypeTags.FLOAT;
value = textValue;
originalValue = textValue;
bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
} else if (type == SyntaxKind.HEX_FLOATING_POINT_LITERAL) {
// Hex floats get a default exponent appended when missing (see getHexNodeValue).
typeTag = TypeTags.FLOAT;
value = getHexNodeValue(textValue);
originalValue = textValue;
bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
} else if (type == SyntaxKind.TRUE_KEYWORD || type == SyntaxKind.FALSE_KEYWORD) {
typeTag = TypeTags.BOOLEAN;
value = Boolean.parseBoolean(textValue);
originalValue = textValue;
bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
} else if (type == SyntaxKind.STRING_LITERAL || type == SyntaxKind.XML_TEXT_CONTENT ||
type == SyntaxKind.TEMPLATE_STRING) {
String text = textValue;
if (type == SyntaxKind.STRING_LITERAL) {
// Strip the surrounding double quotes.
text = text.substring(1, text.length() - 1);
}
String originalText = text;
// Validate and normalize \u{...} unicode escapes: surrogate-range and
// out-of-range code points are reported, and each escape is rewritten to
// a plain \uXXXX form before Java unescaping below.
Matcher matcher = UNICODE_PATTERN.matcher(text);
int position = 0;
while (matcher.find(position)) {
String hexStringVal = matcher.group(1);
int hexDecimalVal = Integer.parseInt(hexStringVal, 16);
if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE)
|| hexDecimalVal > Constants.MAX_UNICODE) {
String hexStringWithBraces = matcher.group(0);
// Diagnostic position is offset to point at the escape itself.
int offset = originalText.indexOf(hexStringWithBraces) + 1;
DiagnosticPos pos = getPosition(literal);
dlog.error(new DiagnosticPos(this.diagnosticSource, pos.sLine, pos.eLine, pos.sCol + offset,
pos.sCol + offset + hexStringWithBraces.length()),
DiagnosticCode.INVALID_UNICODE, hexStringWithBraces);
}
text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal));
position = matcher.end() - 2;
matcher = UNICODE_PATTERN.matcher(text);
}
text = StringEscapeUtils.unescapeJava(text);
typeTag = TypeTags.STRING;
value = text;
originalValue = textValue;
bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
} else if (type == SyntaxKind.NULL_KEYWORD) {
typeTag = TypeTags.NIL;
value = null;
originalValue = "null";
bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
} else if (type == SyntaxKind.NIL_LITERAL) {
typeTag = TypeTags.NIL;
value = null;
originalValue = "()";
bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
} else if (type == SyntaxKind.BINARY_EXPRESSION) { // Should be base16 and base64
// NOTE(review): byte-array literals apparently arrive with this kind —
// confirm against the parser; isNumericLiteral(BINARY_EXPRESSION) is
// always false so the numeric branch below looks unreachable here.
typeTag = TypeTags.BYTE_ARRAY;
value = textValue;
originalValue = textValue;
if (isNumericLiteral(type)) {
bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
} else {
bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
}
}
bLiteral.pos = getPosition(literal);
bLiteral.type = symTable.getTypeFromTag(typeTag);
// NOTE(review): this mutates the tag of the (possibly shared) type instance
// returned by the symbol table — confirm this is intentional.
bLiteral.type.tag = typeTag;
bLiteral.value = value;
bLiteral.originalValue = originalValue;
return bLiteral;
}
/**
 * Converts a syntax-tree type descriptor into the corresponding
 * {@link BLangType}. Built-in and nil types are built directly; qualified or
 * plain identifiers become user-defined type references; inline record type
 * descriptors are lifted into an anonymous top-level type definition and
 * referenced by their generated name; indexed expressions become array types;
 * everything else is delegated to the node's own transform overload.
 */
private BLangType createTypeNode(Node type) {
    if (type instanceof BuiltinSimpleNameReferenceNode || type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        return createBuiltInTypeNode(type);
    } else if (type.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE || type.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
        // User-defined type, optionally qualified with a module prefix.
        BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        BLangNameReference nameReference = createBLangNameReference(type);
        bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
        bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
        bLUserDefinedType.pos = getPosition(type);
        return bLUserDefinedType;
    } else if (type.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        SimpleNameReferenceNode nameReferenceNode = (SimpleNameReferenceNode) type;
        return createTypeNode(nameReferenceNode.name());
    } else if (type.kind() == SyntaxKind.RECORD_TYPE_DESC) {
        // Inline record type: lift it into an anonymous top-level type
        // definition and refer to it by the generated name.
        BLangTypeDefinition bLTypeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
        String genName = anonymousModelHelper.getNextAnonymousTypeKey(diagnosticSource.pkgID);
        IdentifierNode anonTypeGenName = createIdentifier(getPosition(type), genName);
        bLTypeDef.setName(anonTypeGenName);
        bLTypeDef.flagSet.add(Flag.PUBLIC);
        bLTypeDef.flagSet.add(Flag.ANONYMOUS);
        bLTypeDef.typeNode = (BLangType) type.apply(this);
        bLTypeDef.pos = getPosition(type);
        // Bug fix: this definition was previously pushed twice, producing a
        // duplicate top-level node for every inline record type.
        additionalTopLevelNodes.push(bLTypeDef);
        BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        bLUserDefinedType.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        bLUserDefinedType.typeName = bLTypeDef.name;
        bLUserDefinedType.pos = getPosition(type);
        return bLUserDefinedType;
    } else if (type.kind() == SyntaxKind.INDEXED_EXPRESSION) {
        return createBLangArrayType((IndexedExpressionNode) type);
    } else {
        return (BLangType) type.apply(this);
    }
}
/**
 * Builds a {@link BLangArrayType} from a (possibly nested) indexed
 * expression used in type position, walking inward through the container
 * expressions to count dimensions and collect per-dimension sizes:
 * no key -> unsealed, integer literal -> fixed size, `*` -> open-sealed.
 */
private BLangArrayType createBLangArrayType(IndexedExpressionNode indexedExpressionNode) {
int dimensions = 1;
List<Integer> sizes = new ArrayList<>();
while (true) {
Node keyExpr = indexedExpressionNode.keyExpression();
if (keyExpr == null) {
sizes.add(UNSEALED_ARRAY_INDICATOR);
} else {
if (keyExpr.kind() == SyntaxKind.DECIMAL_INTEGER_LITERAL) {
// NOTE(review): keyExpr.toString() may include surrounding trivia, which
// would make parseInt throw — confirm the node text is always clean here.
sizes.add(Integer.parseInt(keyExpr.toString()));
} else if (keyExpr.kind() == SyntaxKind.ASTERISK_TOKEN) {
sizes.add(OPEN_SEALED_ARRAY_INDICATOR);
} else {
// NOTE(review): any other key expression is silently ignored and
// contributes no size entry — confirm whether a diagnostic is intended.
}
}
if (indexedExpressionNode.containerExpression().kind() != SyntaxKind.INDEXED_EXPRESSION) {
break;
}
indexedExpressionNode = (IndexedExpressionNode) indexedExpressionNode.containerExpression();
dimensions++;
}
BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
arrayTypeNode.pos = getPosition(indexedExpressionNode);
arrayTypeNode.elemtype = createTypeNode(indexedExpressionNode.containerExpression());
arrayTypeNode.dimensions = dimensions;
arrayTypeNode.sizes = sizes.stream().mapToInt(val -> val).toArray();
return arrayTypeNode;
}
/**
 * Builds a {@link BLangValueType} for a built-in type node: nil descriptors
 * map to "()", built-in name references use their name text, and bare tokens
 * use their token text. Whitespace is stripped before kind lookup.
 */
private BLangValueType createBuiltInTypeNode(Node type) {
    String typeText;
    if (type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        typeText = "()";
    } else if (type instanceof BuiltinSimpleNameReferenceNode) {
        typeText = ((BuiltinSimpleNameReferenceNode) type).name().text();
    } else {
        typeText = ((Token) type).text();
    }
    BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
    valueType.typeKind = TreeUtils.stringToTypeKind(typeText.replaceAll("\\s+", ""));
    valueType.pos = getPosition(type);
    return valueType;
}
/**
 * Creates a simple variable node with a name and optional initializer but no
 * declared type.
 */
private VariableNode createBasicVarNodeWithoutType(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
                                                   DiagnosticPos identifierPos, ExpressionNode expr) {
    BLangSimpleVariable variable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    variable.pos = pos;
    BLangIdentifier name = this.createIdentifier(identifierPos, identifier, ws);
    name.pos = identifierPos;
    variable.setName(name);
    variable.addWS(ws);
    if (expr != null) {
        variable.setInitialExpression(expr);
    }
    return variable;
}
/**
 * Builds an invocation node from a callee name node and its argument list.
 */
private BLangInvocation createBLangInvocation(Node nameNode, NodeList<FunctionArgumentNode> arguments,
                                              DiagnosticPos position) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangNameReference reference = createBLangNameReference(nameNode);
    invocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
    invocation.name = (BLangIdentifier) reference.name;
    List<BLangExpression> argExprs = new ArrayList<>();
    for (FunctionArgumentNode arg : arguments) {
        argExprs.add((BLangExpression) arg.apply(this));
    }
    invocation.argExprs = argExprs;
    invocation.pos = position;
    return invocation;
}
/**
 * Builds a {@link BLangNameReference} from a name node: a qualified name
 * keeps its module prefix as the package alias, a plain identifier (or the
 * {@code error}/{@code new} keywords) gets an empty alias, and simple name
 * references are unwrapped recursively.
 */
private BLangNameReference createBLangNameReference(Node node) {
    SyntaxKind kind = node.kind();
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // module-prefix:identifier
        QualifiedNameReferenceNode qualifiedName = (QualifiedNameReferenceNode) node;
        Token modulePrefix = qualifiedName.modulePrefix();
        IdentifierToken identifier = qualifiedName.identifier();
        BLangIdentifier pkgAlias = this.createIdentifier(getPosition(modulePrefix), modulePrefix.text());
        BLangIdentifier name = this.createIdentifier(getPosition(identifier), identifier.text());
        return new BLangNameReference(getPosition(node), null, pkgAlias, name);
    }
    if (kind == SyntaxKind.IDENTIFIER_TOKEN || kind == SyntaxKind.ERROR_KEYWORD) {
        Token token = (Token) node;
        BLangIdentifier pkgAlias = this.createIdentifier(null, "");
        BLangIdentifier name = this.createIdentifier(getPosition(token), token.text());
        return new BLangNameReference(getPosition(node), null, pkgAlias, name);
    }
    if (kind == SyntaxKind.NEW_KEYWORD) {
        // The empty alias carries the keyword's position here, unlike above.
        Token token = (Token) node;
        BLangIdentifier pkgAlias = this.createIdentifier(getPosition(token), "");
        BLangIdentifier name = this.createIdentifier(getPosition(token), token.text());
        return new BLangNameReference(getPosition(node), null, pkgAlias, name);
    }
    return createBLangNameReference(((SimpleNameReferenceNode) node).name());
}
/**
 * Parses an integer literal's value, reporting range diagnostics via
 * {@code parseLong}. Hex literals are lower-cased and stripped of their
 * {@code 0x} prefix before parsing. Returns null for non-integer kinds.
 */
private Object getIntegerLiteral(Node literal, String nodeValue) {
    SyntaxKind kind = literal.kind();
    if (kind == SyntaxKind.DECIMAL_INTEGER_LITERAL) {
        return parseLong(literal, nodeValue, nodeValue, 10, DiagnosticCode.INTEGER_TOO_SMALL,
                DiagnosticCode.INTEGER_TOO_LARGE);
    }
    if (kind == SyntaxKind.HEX_INTEGER_LITERAL) {
        String hexDigits = nodeValue.toLowerCase().replace("0x", "");
        return parseLong(literal, nodeValue, hexDigits, 16,
                DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE);
    }
    return null;
}
/**
 * Parses a long in the given radix. On failure (out-of-range literal) a
 * diagnostic is reported — "too small" for negative values, "too large"
 * otherwise — and the raw source text is returned as a fallback value.
 */
private Object parseLong(Node literal, String originalNodeValue,
                         String processedNodeValue, int radix,
                         DiagnosticCode code1, DiagnosticCode code2) {
    try {
        return Long.parseLong(processedNodeValue, radix);
    } catch (Exception e) {
        DiagnosticPos pos = getPosition(literal);
        DiagnosticCode code = originalNodeValue.startsWith("-") ? code1 : code2;
        dlog.error(pos, code, originalNodeValue);
        return originalNodeValue;
    }
}
/**
 * Normalizes a hex floating-point literal: appends a default zero exponent
 * ({@code p0}) when the literal has no explicit p/P exponent part.
 */
private String getHexNodeValue(String value) {
    boolean hasExponent = value.indexOf('p') >= 0 || value.indexOf('P') >= 0;
    return hasExponent ? value : value + "p0";
}
/**
 * Left-pads a hex-digit string with zeros to a minimum width of four, the
 * fixed width of a {@code \}{@code uXXXX} escape.
 */
private String fillWithZeros(String str) {
    StringBuilder padded = new StringBuilder(str);
    while (padded.length() < 4) {
        padded.insert(0, '0');
    }
    return padded.toString();
}
/**
 * Marks a variable as final and recurses into the members of tuple, record
 * and error binding patterns so the whole pattern is final.
 */
private void markVariableAsFinal(BLangVariable variable) {
    variable.flagSet.add(Flag.FINAL);
    NodeKind kind = variable.getKind();
    if (kind == NodeKind.TUPLE_VARIABLE) {
        BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
        for (BLangVariable member : tupleVariable.memberVariables) {
            markVariableAsFinal(member);
        }
        if (tupleVariable.restVariable != null) {
            markVariableAsFinal(tupleVariable.restVariable);
        }
    } else if (kind == NodeKind.RECORD_VARIABLE) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) variable;
        recordVariable.variableList.forEach(keyValue -> markVariableAsFinal(keyValue.getValue()));
        if (recordVariable.restParam != null) {
            markVariableAsFinal((BLangVariable) recordVariable.restParam);
        }
    } else if (kind == NodeKind.ERROR_VARIABLE) {
        BLangErrorVariable errorVariable = (BLangErrorVariable) variable;
        markVariableAsFinal(errorVariable.reason);
        errorVariable.detail.forEach(entry -> markVariableAsFinal(entry.valueBindingPattern));
        if (errorVariable.restDetail != null) {
            markVariableAsFinal(errorVariable.restDetail);
        }
    }
}
/**
 * Reports whether the given syntax kind is a simple literal: a string,
 * integer, or floating-point literal, a boolean keyword, or a nil/null literal.
 *
 * @param syntaxKind the syntax kind to classify
 * @return {@code true} for simple literal kinds, {@code false} otherwise
 */
private boolean isSimpleLiteral(SyntaxKind syntaxKind) {
    return syntaxKind == SyntaxKind.STRING_LITERAL
            || syntaxKind == SyntaxKind.DECIMAL_INTEGER_LITERAL
            || syntaxKind == SyntaxKind.HEX_INTEGER_LITERAL
            || syntaxKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL
            || syntaxKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL
            || syntaxKind == SyntaxKind.TRUE_KEYWORD
            || syntaxKind == SyntaxKind.FALSE_KEYWORD
            || syntaxKind == SyntaxKind.NIL_LITERAL
            || syntaxKind == SyntaxKind.NULL_KEYWORD;
}
/**
 * Reports whether the given syntax kind is a numeric literal
 * (decimal/hex integer or decimal/hex floating point).
 *
 * @param syntaxKind the syntax kind to classify
 * @return {@code true} for numeric literal kinds, {@code false} otherwise
 */
private boolean isNumericLiteral(SyntaxKind syntaxKind) {
    return syntaxKind == SyntaxKind.DECIMAL_INTEGER_LITERAL
            || syntaxKind == SyntaxKind.HEX_INTEGER_LITERAL
            || syntaxKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL
            || syntaxKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL;
}
/**
 * Strips the quoted-identifier prefix ({@code '}) from an identifier,
 * unescaping any Java-style escapes it contains first. Identifiers without
 * the prefix are returned unchanged.
 *
 * @param identifier the identifier text, possibly quoted
 * @return the identifier without the quote prefix
 */
private static String escapeQuotedIdentifier(String identifier) {
    if (!identifier.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
        return identifier;
    }
    // Unescape first, then drop the leading quote character.
    return StringEscapeUtils.unescapeJava(identifier).substring(1);
}
/**
 * Reports whether a syntax-tree node was actually supplied by the parser.
 * A slot the parser left empty carries kind {@code NONE}.
 *
 * @param node the node to check (must be non-null)
 * @return {@code true} when the node is present, {@code false} for a NONE placeholder
 */
private boolean isPresent(Node node) {
    return SyntaxKind.NONE != node.kind();
}
/**
 * Combines two type nodes into a union type node, reusing an existing union
 * node from either side rather than nesting a fresh union around it.
 * The RHS is checked first; presumably union type descriptors are built
 * right-associatively, so prepending the LHS keeps members in source order —
 * confirm against the parser if this ordering matters.
 *
 * @param lhsTypeNode the left-hand type
 * @param rhsTypeNode the right-hand type
 * @param position    source position to attach to the resulting union node
 * @return the (possibly reused) union type node containing both types
 */
private BLangUnionTypeNode addUnionType(BLangType lhsTypeNode, BLangType rhsTypeNode, DiagnosticPos position) {
    BLangUnionTypeNode unionType;
    if (rhsTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
        // RHS is already a union: prepend the LHS as its first member.
        unionType = (BLangUnionTypeNode) rhsTypeNode;
        unionType.memberTypeNodes.add(0, lhsTypeNode);
    } else if (lhsTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
        // LHS is already a union: append the RHS.
        unionType = (BLangUnionTypeNode) lhsTypeNode;
        unionType.memberTypeNodes.add(rhsTypeNode);
    } else {
        // Neither side is a union: build a new two-member union node.
        unionType = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
        unionType.memberTypeNodes.add(lhsTypeNode);
        unionType.memberTypeNodes.add(rhsTypeNode);
    }
    unionType.pos = position;
    return unionType;
}
/**
 * Fluent builder for {@link BLangSimpleVariable} nodes. Collects the name,
 * type, initializer expression, flags, and var/final markers, then
 * materializes the AST node in {@link #build()}.
 */
private class SimpleVarBuilder {
    private BLangIdentifier name;
    private BLangType type;
    // true when the variable is declared with `var` (type inferred).
    private boolean isDeclaredWithVar;
    private Set<Flag> flags = new HashSet<>();
    private boolean isFinal;
    private ExpressionNode expr;

    /**
     * Creates the {@link BLangSimpleVariable} from the collected state.
     *
     * @return the constructed simple-variable AST node
     */
    public BLangSimpleVariable build() {
        BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        bLSimpleVar.setName(this.name);
        // The original code invoked setTypeNode(this.type) twice with the same
        // argument; the redundant second call has been removed.
        bLSimpleVar.setTypeNode(this.type);
        bLSimpleVar.isDeclaredWithVar = this.isDeclaredWithVar;
        bLSimpleVar.flagSet.addAll(this.flags);
        if (this.isFinal) {
            // Recursively marks the variable (and nested bindings) as final.
            markVariableAsFinal(bLSimpleVar);
        }
        bLSimpleVar.setInitialExpression(this.expr);
        return bLSimpleVar;
    }

    /** Sets the variable name without a source position. */
    public SimpleVarBuilder with(String name) {
        this.name = createIdentifier(null, name);
        return this;
    }

    /** Sets the variable name with the given source position. */
    public SimpleVarBuilder with(String name, DiagnosticPos identifierPos) {
        this.name = createIdentifier(identifierPos, name);
        return this;
    }

    /**
     * Derives the type node from a syntax-tree type name; a null or `var`
     * type descriptor marks the variable as declared with `var`.
     */
    public SimpleVarBuilder setTypeByNode(Node typeName) {
        if (typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC) {
            this.isDeclaredWithVar = true;
        }
        this.type = createTypeNode(typeName);
        return this;
    }

    /** Sets the initializer from a syntax-tree node; null leaves it unset. */
    public SimpleVarBuilder setExpressionByNode(Node initExprNode) {
        this.expr = initExprNode != null ? createExpression(initExprNode) : null;
        return this;
    }

    /** Sets an already-converted initializer expression. */
    public SimpleVarBuilder setExpression(ExpressionNode expression) {
        this.expr = expression;
        return this;
    }

    /** Marks the variable as declared with `var`. */
    public SimpleVarBuilder isDeclaredWithVar() {
        this.isDeclaredWithVar = true;
        return this;
    }

    /** Marks the variable as final. */
    public SimpleVarBuilder isFinal() {
        this.isFinal = true;
        return this;
    }

    /** Marks the variable as a listener declaration (listeners are implicitly final). */
    public SimpleVarBuilder isListenerVar() {
        this.flags.add(Flag.LISTENER);
        this.flags.add(Flag.FINAL);
        return this;
    }

    /** Applies a private/public visibility qualifier token, if present. */
    public SimpleVarBuilder setVisibility(Token visibilityQualifier) {
        if (visibilityQualifier != null) {
            if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
                this.flags.add(Flag.PRIVATE);
            } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
                this.flags.add(Flag.PUBLIC);
            }
        }
        return this;
    }

    /** Sets or clears the final marker explicitly. */
    public SimpleVarBuilder setFinal(boolean present) {
        this.isFinal = present;
        return this;
    }

    /**
     * NOTE(review): this toggles {@link Flag#PUBLIC}, not an OPTIONAL flag,
     * which looks inconsistent with the method name — confirm intent before
     * changing. Behavior preserved as-is.
     */
    public SimpleVarBuilder setOptional(boolean present) {
        if (present) {
            this.flags.add(Flag.PUBLIC);
        } else {
            this.flags.remove(Flag.PUBLIC);
        }
        return this;
    }

    /** Sets or clears the REQUIRED flag (e.g. for required record fields). */
    public SimpleVarBuilder setRequired(boolean present) {
        if (present) {
            this.flags.add(Flag.REQUIRED);
        } else {
            this.flags.remove(Flag.REQUIRED);
        }
        return this;
    }

    /** Adds the PUBLIC flag. */
    public SimpleVarBuilder isPublic() {
        this.flags.add(Flag.PUBLIC);
        return this;
    }

    /** Adds the WORKER flag (variable holds a named worker's future). */
    public SimpleVarBuilder isWorkerVar() {
        this.flags.add(Flag.WORKER);
        return this;
    }
}
} | class BLangNodeTransformer extends NodeTransformer<BLangNode> {
private static final String IDENTIFIER_LITERAL_PREFIX = "'";
private BLangDiagnosticLogHelper dlog;
private SymbolTable symTable;
private BDiagnosticSource diagnosticSource;
private static final Pattern UNICODE_PATTERN = Pattern.compile(Constants.UNICODE_REGEX);
private BLangAnonymousModelHelper anonymousModelHelper;
/* To keep track of additional top-level nodes produced from multi-BLangNode resultant transformations */
private Stack<TopLevelNode> additionalTopLevelNodes = new Stack<>();
/* To keep track of additional statements produced from multi-BLangNode resultant transformations */
private Stack<BLangStatement> additionalStatements = new Stack<>();
public BLangNodeTransformer(CompilerContext context, BDiagnosticSource diagnosticSource) {
this.dlog = BLangDiagnosticLogHelper.getInstance(context);
this.symTable = SymbolTable.getInstance(context);
this.diagnosticSource = diagnosticSource;
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
}
public List<org.ballerinalang.model.tree.Node> accept(Node node) {
BLangNode bLangNode = node.apply(this);
List<org.ballerinalang.model.tree.Node> nodes = new ArrayList<>();
while (!additionalTopLevelNodes.empty()) {
nodes.add(additionalTopLevelNodes.pop());
}
while (!additionalStatements.empty()) {
nodes.add(additionalStatements.pop());
}
nodes.add(bLangNode);
return nodes;
}
@Override
public BLangNode transform(IdentifierToken identifierToken) {
return this.createIdentifier(getPosition(identifierToken), identifierToken.text());
}
private DiagnosticPos getPosition(Node node) {
if (node == null) {
return null;
}
LineRange lineRange = node.lineRange();
LinePosition startPos = lineRange.startLine();
LinePosition endPos = lineRange.endLine();
return new DiagnosticPos(diagnosticSource, startPos.line() + 1, endPos.line() + 1,
startPos.offset() + 1, endPos.offset() + 1);
}
@Override
public BLangNode transform(ModulePartNode modulePart) {
BLangCompilationUnit compilationUnit = (BLangCompilationUnit) TreeBuilder.createCompilationUnit();
compilationUnit.name = diagnosticSource.cUnitName;
DiagnosticPos pos = getPosition(modulePart);
for (ImportDeclarationNode importDecl : modulePart.imports()) {
BLangImportPackage bLangImport = (BLangImportPackage) importDecl.apply(this);
bLangImport.compUnit = this.createIdentifier(pos, compilationUnit.getName());
compilationUnit.addTopLevelNode(bLangImport);
}
for (ModuleMemberDeclarationNode member : modulePart.members()) {
compilationUnit.addTopLevelNode((TopLevelNode) member.apply(this));
}
while (!this.additionalTopLevelNodes.empty()) {
compilationUnit.addTopLevelNode(this.additionalTopLevelNodes.pop());
}
compilationUnit.pos = pos;
return compilationUnit;
}
@Override
public BLangNode transform(ModuleVariableDeclarationNode modVarDeclrNode) {
BLangSimpleVariable simpleVar = createSimpleVar(modVarDeclrNode.variableName(),
modVarDeclrNode.typeName(), modVarDeclrNode.initializer(),
modVarDeclrNode.finalKeyword().isPresent(), false, null);
simpleVar.pos = getPosition(modVarDeclrNode);
return simpleVar;
}
@Override
public BLangNode transform(ImportDeclarationNode importDeclaration) {
Node orgNameNode = importDeclaration.orgName().orElse(null);
Node versionNode = importDeclaration.version().orElse(null);
Node prefixNode = importDeclaration.prefix().orElse(null);
String orgName = null;
if (orgNameNode != null) {
ImportOrgNameNode importOrgName = (ImportOrgNameNode) orgNameNode;
orgName = importOrgName.orgName().text();
}
String version = null;
if (versionNode != null) {
version = ((ImportVersionNode) versionNode).versionNumber().toString();
}
String prefix = null;
if (prefixNode != null) {
prefix = ((ImportPrefixNode) prefixNode).prefix().toString();
}
List<BLangIdentifier> pkgNameComps = new ArrayList<>();
NodeList<IdentifierToken> names = importDeclaration.moduleName();
names.forEach(name -> pkgNameComps.add(this.createIdentifier(getPosition(name), name.text(), null)));
BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
importDcl.pos = getPosition(importDeclaration);
importDcl.pkgNameComps = pkgNameComps;
importDcl.orgName = this.createIdentifier(getPosition(orgNameNode), orgName);
importDcl.version = this.createIdentifier(getPosition(versionNode), version);
importDcl.alias = (prefix != null && !prefix.isEmpty()) ? this.createIdentifier(getPosition(prefixNode), prefix,
null) :
pkgNameComps.get(pkgNameComps.size() - 1);
return importDcl;
}
public BLangNode transform(ConstantDeclarationNode constantDeclarationNode) {
BLangConstant constantNode = (BLangConstant) TreeBuilder.createConstantNode();
DiagnosticPos pos = getPosition(constantDeclarationNode);
DiagnosticPos identifierPos = getPosition(constantDeclarationNode.variableName());
constantNode.name = createIdentifier(pos, constantDeclarationNode.variableName().text());
constantNode.expr = createExpression(constantDeclarationNode.initializer());
if (constantDeclarationNode.typeDescriptor() != null) {
constantNode.typeNode = createTypeNode(constantDeclarationNode.typeDescriptor());
}
constantNode.flagSet.add(Flag.CONSTANT);
if (constantDeclarationNode.visibilityQualifier() != null &&
constantDeclarationNode.visibilityQualifier().kind() == SyntaxKind.PUBLIC_KEYWORD) {
constantNode.flagSet.add(Flag.PUBLIC);
}
NodeKind nodeKind = constantNode.expr.getKind();
if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
BLangLiteral literal = nodeKind == NodeKind.LITERAL ?
(BLangLiteral) TreeBuilder.createLiteralExpression() :
(BLangLiteral) TreeBuilder.createNumericLiteralExpression();
literal.setValue(((BLangLiteral) constantNode.expr).value);
literal.type = constantNode.expr.type;
literal.isConstant = true;
BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
finiteTypeNode.valueSpace.add(literal);
BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
String genName = anonymousModelHelper.getNextAnonymousTypeKey(pos.src.pkgID);
IdentifierNode anonTypeGenName = createIdentifier(identifierPos, genName);
typeDef.setName(anonTypeGenName);
typeDef.flagSet.add(Flag.PUBLIC);
typeDef.flagSet.add(Flag.ANONYMOUS);
typeDef.typeNode = finiteTypeNode;
typeDef.pos = pos;
constantNode.associatedTypeDefinition = typeDef;
}
return constantNode;
}
public BLangNode transform(TypeDefinitionNode typeDefNode) {
BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
BLangIdentifier identifierNode = this.createIdentifier(getPosition(typeDefNode.typeName()),
typeDefNode.typeName().text());
typeDef.setName(identifierNode);
typeDef.typeNode = createTypeNode(typeDefNode.typeDescriptor());
typeDefNode.visibilityQualifier().ifPresent(visibilityQual -> {
if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
typeDef.flagSet.add(Flag.PUBLIC);
}
});
typeDef.pos = getPosition(typeDefNode);
return typeDef;
}
@Override
public BLangNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {
BLangType rhsTypeNode = createTypeNode(unionTypeDescriptorNode.rightTypeDesc());
BLangType lhsTypeNode = createTypeNode(unionTypeDescriptorNode.leftTypeDesc());
return addUnionType(lhsTypeNode, rhsTypeNode, getPosition(unionTypeDescriptorNode));
}
@Override
public BLangNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) {
BLangType typeNode = createTypeNode(parenthesisedTypeDescriptorNode.typedesc());
typeNode.grouped = true;
return typeNode;
}
@Override
public BLangNode transform(TypeParameterNode typeParameterNode) {
return createTypeNode(typeParameterNode.typeNode());
}
@Override
@Override
public BLangNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) {
BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
if (errorTypeDescriptorNode.errorTypeParamsNode().isPresent()) {
errorType.reasonType = createTypeNode(errorTypeDescriptorNode.errorTypeParamsNode().get());
}
return errorType;
}
@Override
public BLangNode transform(ErrorTypeParamsNode errorTypeParamsNode) {
return createTypeNode(errorTypeParamsNode.parameter());
}
@Override
public BLangNode transform(ObjectTypeDescriptorNode objTypeDescNode) {
BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
for (Token qualifier : objTypeDescNode.objectTypeQualifiers()) {
if (qualifier.kind() == SyntaxKind.ABSTRACT_KEYWORD) {
objectTypeNode.flagSet.add(Flag.ABSTRACT);
}
if (qualifier.kind() == SyntaxKind.CLIENT_KEYWORD) {
objectTypeNode.flagSet.add(Flag.CLIENT);
}
if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
objectTypeNode.flagSet.add(Flag.SERVICE);
}
}
for (Node node : objTypeDescNode.members()) {
BLangNode bLangNode = node.apply(this);
if (bLangNode.getKind() == NodeKind.FUNCTION) {
BLangFunction bLangFunction = (BLangFunction) bLangNode;
bLangFunction.attachedFunction = true;
if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
bLangFunction.objInitFunction = true;
objectTypeNode.initFunction = bLangFunction;
} else {
objectTypeNode.addFunction(bLangFunction);
}
} else if (bLangNode.getKind() == NodeKind.VARIABLE) {
objectTypeNode.addField((BLangSimpleVariable) bLangNode);
}
}
objectTypeNode.isAnonymous = false;
objectTypeNode.pos = getPosition(objTypeDescNode);
return objectTypeNode;
}
@Override
public BLangNode transform(ObjectFieldNode objFieldNode) {
BLangSimpleVariable simpleVar = createSimpleVar(objFieldNode.fieldName(), objFieldNode.typeName(),
objFieldNode.expression(),
false, false, objFieldNode.visibilityQualifier());
simpleVar.pos = getPosition(objFieldNode);
return simpleVar;
}
@Override
public BLangNode transform(ServiceDeclarationNode serviceDeclrNode) {
return createService(serviceDeclrNode, serviceDeclrNode.serviceName(), false);
}
private BLangNode createService(ServiceDeclarationNode serviceDeclrNode, IdentifierToken serviceNameNode,
boolean isAnonServiceValue) {
BLangService bLService = (BLangService) TreeBuilder.createServiceNode();
bLService.isAnonymousServiceValue = isAnonServiceValue;
DiagnosticPos pos = getPosition(serviceDeclrNode);
String serviceName;
DiagnosticPos identifierPos;
if (isAnonServiceValue) {
serviceName = this.anonymousModelHelper.getNextAnonymousServiceVarKey(diagnosticSource.pkgID);
identifierPos = pos;
} else {
serviceName = serviceNameNode.text();
identifierPos = getPosition(serviceNameNode);
}
String serviceTypeName = this.anonymousModelHelper.getNextAnonymousServiceTypeKey(diagnosticSource.pkgID,
serviceName);
BLangIdentifier serviceVar = createIdentifier(identifierPos, serviceName);
serviceVar.pos = identifierPos;
bLService.setName(serviceVar);
if (!isAnonServiceValue) {
}
this.additionalTopLevelNodes.add(bLService);
BLangTypeDefinition bLTypeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
BLangIdentifier serviceTypeID = createIdentifier(identifierPos, serviceTypeName);
serviceTypeID.pos = pos;
bLTypeDef.setName(serviceTypeID);
bLTypeDef.flagSet.add(Flag.SERVICE);
bLTypeDef.typeNode = (BLangType) serviceDeclrNode.serviceBody().apply(this);
bLTypeDef.pos = pos;
bLService.serviceTypeDefinition = bLTypeDef;
final BLangServiceConstructorExpr serviceConstNode = (BLangServiceConstructorExpr) TreeBuilder
.createServiceConstructorNode();
serviceConstNode.serviceNode = bLService;
serviceConstNode.pos = pos;
bLService.pos = pos;
if (!isAnonServiceValue) {
BLangSimpleVariable var = (BLangSimpleVariable) createBasicVarNodeWithoutType(identifierPos,
Collections.emptySet(),
serviceName, identifierPos,
serviceConstNode);
var.flagSet.add(Flag.FINAL);
var.flagSet.add(Flag.SERVICE);
BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
bLUserDefinedType.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
bLUserDefinedType.typeName = bLTypeDef.name;
bLUserDefinedType.pos = pos;
var.typeNode = bLUserDefinedType;
bLService.variableNode = var;
this.additionalTopLevelNodes.add(bLTypeDef);
return var;
} else {
return bLTypeDef;
}
}
@Override
public BLangNode transform(ServiceBodyNode serviceBodyNode) {
BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
objectTypeNode.flagSet.add(Flag.SERVICE);
for (Node resourceNode : serviceBodyNode.resources()) {
BLangNode bLangNode = resourceNode.apply(this);
if (bLangNode.getKind() == NodeKind.FUNCTION) {
BLangFunction bLangFunction = (BLangFunction) bLangNode;
bLangFunction.attachedFunction = true;
objectTypeNode.addFunction(bLangFunction);
}
}
objectTypeNode.isAnonymous = false;
objectTypeNode.pos = getPosition(serviceBodyNode);
return objectTypeNode;
}
@Override
public BLangNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
boolean hasRestField = false;
for (Node field : recordTypeDescriptorNode.fields()) {
if (field.kind() == SyntaxKind.RECORD_FIELD || field.kind() == SyntaxKind.RECORD_FIELD_WITH_DEFAULT_VALUE) {
recordTypeNode.fields.add((BLangSimpleVariable) field.apply(this));
} else if (field.kind() == SyntaxKind.RECORD_REST_TYPE) {
recordTypeNode.restFieldType = (BLangValueType) field.apply(this);
hasRestField = true;
} else if (field.kind() == SyntaxKind.TYPE_REFERENCE) {
recordTypeNode.addTypeReference((BLangType) field.apply(this));
}
}
recordTypeNode.sealed = !hasRestField;
recordTypeNode.pos = getPosition(recordTypeDescriptorNode);
return recordTypeNode;
}
@Override
public BLangNode transform(TypeReferenceNode typeReferenceNode) {
return createTypeNode(typeReferenceNode.typeName());
}
@Override
public BLangNode transform(RecordFieldNode recordFieldNode) {
BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName());
simpleVar.flagSet.add(Flag.PUBLIC);
if (recordFieldNode.questionMarkToken().isPresent()) {
simpleVar.flagSet.add(Flag.OPTIONAL);
} else {
simpleVar.flagSet.add(Flag.REQUIRED);
}
simpleVar.pos = getPosition(recordFieldNode);
return simpleVar;
}
@Override
public BLangNode transform(RecordFieldWithDefaultValueNode recordFieldNode) {
BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName());
simpleVar.flagSet.add(Flag.PUBLIC);
if (isPresent(recordFieldNode.expression())) {
simpleVar.setInitialExpression(createExpression(recordFieldNode.expression()));
}
simpleVar.pos = getPosition(recordFieldNode);
return simpleVar;
}
@Override
public BLangNode transform(RecordRestDescriptorNode recordFieldNode) {
return createTypeNode(recordFieldNode.typeName());
}
@Override
public BLangNode transform(FunctionDefinitionNode funcDefNode) {
BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
IdentifierToken funcName = funcDefNode.functionName();
bLFunction.name = createIdentifier(getPosition(funcName), funcName.text());
funcDefNode.visibilityQualifier().ifPresent(visibilityQual -> {
if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
bLFunction.flagSet.add(Flag.PUBLIC);
} else if (visibilityQual.kind() == SyntaxKind.PRIVATE_KEYWORD) {
bLFunction.flagSet.add(Flag.PRIVATE);
} else if (visibilityQual.kind() == SyntaxKind.REMOTE_KEYWORD) {
bLFunction.flagSet.add(Flag.REMOTE);
} else if (visibilityQual.kind() == SyntaxKind.RESOURCE_KEYWORD) {
bLFunction.flagSet.add(Flag.RESOURCE);
}
});
populateFuncSignature(bLFunction, funcDefNode.functionSignature());
if (funcDefNode.functionBody() == null) {
bLFunction.body = null;
bLFunction.flagSet.add(Flag.INTERFACE);
bLFunction.interfaceFunction = true;
} else {
bLFunction.body = (BLangFunctionBody) funcDefNode.functionBody().apply(this);
if (bLFunction.body.getKind() == NodeKind.EXTERN_FUNCTION_BODY) {
bLFunction.flagSet.add(Flag.NATIVE);
}
}
bLFunction.pos = getPosition(funcDefNode);
return bLFunction;
}
@Override
public BLangNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) {
BLangExternalFunctionBody externFunctionBodyNode =
(BLangExternalFunctionBody) TreeBuilder.createExternFunctionBodyNode();
externFunctionBodyNode.annAttachments = applyAll(externalFunctionBodyNode.annotations());
return externFunctionBodyNode;
}
@Override
public BLangNode transform(ExplicitAnonymousFunctionExpressionNode anonFuncExprNode) {
BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
DiagnosticPos pos = getPosition(anonFuncExprNode);
bLFunction.name = createIdentifier(pos,
anonymousModelHelper.getNextAnonymousFunctionKey(diagnosticSource.pkgID));
populateFuncSignature(bLFunction, anonFuncExprNode.functionSignature());
bLFunction.body = (BLangFunctionBody) anonFuncExprNode.functionBody().apply(this);
bLFunction.pos = pos;
bLFunction.addFlag(Flag.LAMBDA);
bLFunction.addFlag(Flag.ANONYMOUS);
this.additionalTopLevelNodes.add(bLFunction);
BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaExpr.function = bLFunction;
lambdaExpr.pos = pos;
return lambdaExpr;
}
@Override
public BLangNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
BLangBlockFunctionBody bLFuncBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
List<BLangStatement> statements = new ArrayList<>();
for (StatementNode statement : functionBodyBlockNode.statements()) {
if (statement != null) {
statements.add((BLangStatement) statement.apply(this));
}
}
if (functionBodyBlockNode.namedWorkerDeclarator().isPresent()) {
NamedWorkerDeclarator namedWorkerDeclarator = functionBodyBlockNode.namedWorkerDeclarator().get();
for (StatementNode statement : namedWorkerDeclarator.workerInitStatements()) {
statements.add((BLangStatement) statement.apply(this));
}
for (NamedWorkerDeclarationNode workerDeclarationNode : namedWorkerDeclarator.namedWorkerDeclarations()) {
statements.add((BLangStatement) workerDeclarationNode.apply(this));
while (!this.additionalStatements.empty()) {
statements.add(additionalStatements.pop());
}
}
}
bLFuncBody.stmts = statements;
bLFuncBody.pos = getPosition(functionBodyBlockNode);
return bLFuncBody;
}
@Override
public BLangNode transform(NamedWorkerDeclarationNode namedWorkerDeclNode) {
BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
DiagnosticPos pos = getPosition(namedWorkerDeclNode);
bLFunction.name = createIdentifier(pos,
anonymousModelHelper.getNextAnonymousFunctionKey(diagnosticSource.pkgID));
BLangBlockStmt blockStmt = (BLangBlockStmt) namedWorkerDeclNode.workerBody().apply(this);
BLangBlockFunctionBody bodyNode = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
bodyNode.stmts = blockStmt.stmts;
bLFunction.body = bodyNode;
bLFunction.pos = pos;
bLFunction.addFlag(Flag.LAMBDA);
bLFunction.addFlag(Flag.ANONYMOUS);
bLFunction.addFlag(Flag.WORKER);
String workerName = namedWorkerDeclNode.workerName().text();
bLFunction.defaultWorkerName.value = workerName;
bLFunction.defaultWorkerName.pos = getPosition(namedWorkerDeclNode.workerName());
NodeList<AnnotationNode> annotations = namedWorkerDeclNode.annotations();
bLFunction.annAttachments = applyAll(annotations);
Optional<Node> retNode = namedWorkerDeclNode.returnTypeDesc();
if (retNode.isPresent()) {
ReturnTypeDescriptorNode returnType = (ReturnTypeDescriptorNode) retNode.get();
bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
} else {
BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
bLValueType.pos = getPosition(namedWorkerDeclNode);
bLValueType.typeKind = TypeKind.NIL;
bLFunction.setReturnTypeNode(bLValueType);
}
this.additionalTopLevelNodes.add(bLFunction);
BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaExpr.function = bLFunction;
lambdaExpr.pos = pos;
String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;
BLangSimpleVariable var = new SimpleVarBuilder()
.with(workerLambdaName)
.setExpression(lambdaExpr)
.isDeclaredWithVar()
.isFinal()
.build();
BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
DiagnosticPos workerNamePos = getPosition(namedWorkerDeclNode.workerName());
lamdaWrkr.pos = workerNamePos;
lamdaWrkr.setVariable(var);
lamdaWrkr.isWorker = true;
BLangInvocation bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
BLangIdentifier nameInd = this.createIdentifier(pos, workerLambdaName);
BLangNameReference reference = new BLangNameReference(workerNamePos, null, TreeBuilder.createIdentifierNode(),
nameInd);
bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
bLInvocation.name = (BLangIdentifier) reference.name;
bLInvocation.pos = workerNamePos;
bLInvocation.flagSet = new HashSet<>();
bLInvocation.annAttachments = bLFunction.annAttachments;
if (bLInvocation.getKind() == NodeKind.INVOCATION) {
bLInvocation.async = true;
} else {
dlog.error(pos, DiagnosticCode.START_REQUIRE_INVOCATION);
}
BLangSimpleVariable invoc = new SimpleVarBuilder()
.with(workerName, getPosition(namedWorkerDeclNode.workerName()))
.isDeclaredWithVar()
.isWorkerVar()
.setExpression(bLInvocation)
.isFinal()
.build();
BLangSimpleVariableDef workerInvoc = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
workerInvoc.pos = workerNamePos;
workerInvoc.setVariable(invoc);
workerInvoc.isWorker = true;
this.additionalStatements.push(workerInvoc);
return lamdaWrkr;
}
private <A extends BLangNode, B extends Node> List<A> applyAll(NodeList<B> annotations) {
ArrayList<A> annAttachments = new ArrayList<>();
for (B annotation : annotations) {
A blNode = (A) annotation.apply(this);
annAttachments.add(blNode);
}
return annAttachments;
}
@Override
public BLangNode transform(AnnotationNode annotation) {
Node name = annotation.annotReference();
BLangAnnotationAttachment bLAnnotationAttachment =
(BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();
if (annotation.annotValue().isPresent()) {
MappingConstructorExpressionNode map = annotation.annotValue().get();
BLangExpression bLExpression = (BLangExpression) map.apply(this);
bLAnnotationAttachment.setExpression(bLExpression);
}
BLangNameReference nameReference = createBLangNameReference(name);
bLAnnotationAttachment.setAnnotationName(nameReference.name);
bLAnnotationAttachment.setPackageAlias(nameReference.pkgAlias);
bLAnnotationAttachment.pos = getPosition(annotation);
return bLAnnotationAttachment;
}
@Override
public BLangNode transform(CheckExpressionNode checkExpressionNode) {
DiagnosticPos pos = getPosition(checkExpressionNode);
BLangExpression expr = createExpression(checkExpressionNode.expression());
if (checkExpressionNode.checkKeyword().kind() == SyntaxKind.CHECK_KEYWORD) {
return createCheckExpr(pos, expr);
}
return createCheckPanickedExpr(pos, expr);
}
@Override
public BLangNode transform(TypeTestExpressionNode typeTestExpressionNode) {
BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
typeTestExpr.expr = createExpression(typeTestExpressionNode.expression());
typeTestExpr.typeNode = createTypeNode(typeTestExpressionNode.typeDescriptor());
typeTestExpr.pos = getPosition(typeTestExpressionNode);
return typeTestExpr;
}
@Override
public BLangNode transform(MappingConstructorExpressionNode mapConstruct) {
BLangRecordLiteral bLiteralNode = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
for (MappingFieldNode field : mapConstruct.fields()) {
if (field.kind() == SyntaxKind.SPREAD_FIELD) {
SpreadFieldNode spreadFieldNode = (SpreadFieldNode) field;
BLangRecordSpreadOperatorField bLRecordSpreadOpField =
(BLangRecordSpreadOperatorField) TreeBuilder.createRecordSpreadOperatorField();
bLRecordSpreadOpField.expr = createExpression(spreadFieldNode.valueExpr());
bLiteralNode.fields.add(bLRecordSpreadOpField);
} else {
SpecificFieldNode specificField = (SpecificFieldNode) field;
BLangRecordKeyValueField bLRecordKeyValueField =
(BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
bLRecordKeyValueField.valueExpr = createExpression(specificField.valueExpr());
bLRecordKeyValueField.key = new BLangRecordLiteral.BLangRecordKey(
createExpression(specificField.fieldName()));
bLRecordKeyValueField.key.computedKey = false;
bLiteralNode.fields.add(bLRecordKeyValueField);
}
}
bLiteralNode.pos = getPosition(mapConstruct);
return bLiteralNode;
}
@Override
public BLangNode transform(ListConstructorExpressionNode listConstructorExprNode) {
List<BLangExpression> argExprList = new ArrayList<>();
BLangListConstructorExpr listConstructorExpr = (BLangListConstructorExpr)
TreeBuilder.createListConstructorExpressionNode();
for (Node expr : listConstructorExprNode.expressions()) {
argExprList.add(createExpression(expr));
}
listConstructorExpr.exprs = argExprList;
listConstructorExpr.pos = getPosition(listConstructorExprNode);
return listConstructorExpr;
}
@Override
public BLangNode transform(UnaryExpressionNode unaryExprNode) {
DiagnosticPos pos = getPosition(unaryExprNode);
OperatorKind operator = OperatorKind.valueFrom(unaryExprNode.unaryOperator().text());
BLangExpression expr = createExpression(unaryExprNode.expression());
return createBLangUnaryExpr(pos, operator, expr);
}
@Override
public BLangNode transform(TypeofExpressionNode typeofExpressionNode) {
DiagnosticPos pos = getPosition(typeofExpressionNode);
OperatorKind operator = OperatorKind.valueFrom(typeofExpressionNode.typeofKeyword().text());
BLangExpression expr = createExpression(typeofExpressionNode.expression());
return createBLangUnaryExpr(pos, operator, expr);
}
@Override
public BLangNode transform(BinaryExpressionNode binaryExprNode) {
BLangBinaryExpr bLBinaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
bLBinaryExpr.pos = getPosition(binaryExprNode);
bLBinaryExpr.lhsExpr = createExpression(binaryExprNode.lhsExpr());
bLBinaryExpr.rhsExpr = createExpression(binaryExprNode.rhsExpr());
bLBinaryExpr.opKind = OperatorKind.valueFrom(binaryExprNode.operator().text());
return bLBinaryExpr;
}
@Override
public BLangNode transform(FieldAccessExpressionNode fieldAccessExprNode) {
BLangFieldBasedAccess bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
Token fieldName = fieldAccessExprNode.fieldName();
bLFieldBasedAccess.pos = getPosition(fieldAccessExprNode);
BLangNameReference nameRef = createBLangNameReference(fieldName);
bLFieldBasedAccess.field = createIdentifier(getPosition(fieldName), nameRef.name.getValue());
bLFieldBasedAccess.field.pos = getPosition(fieldAccessExprNode);
bLFieldBasedAccess.expr = createExpression(fieldAccessExprNode.expression());
bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
bLFieldBasedAccess.optionalFieldAccess = false;
return bLFieldBasedAccess;
}
@Override
public BLangNode transform(BracedExpressionNode brcExprOut) {
return createExpression(brcExprOut.expression());
}
@Override
public BLangNode transform(FunctionCallExpressionNode functionCallNode) {
return createBLangInvocation(functionCallNode.functionName(), functionCallNode.arguments(),
getPosition(functionCallNode));
}
public BLangNode transform(MethodCallExpressionNode methodCallExprNode) {
BLangInvocation bLInvocation = createBLangInvocation(methodCallExprNode.methodName(),
methodCallExprNode.arguments(),
getPosition(methodCallExprNode));
bLInvocation.expr = createExpression(methodCallExprNode.expression());
return bLInvocation;
}
@Override
public BLangNode transform(ImplicitNewExpressionNode implicitNewExprNode) {
BLangTypeInit initNode = createTypeInit(implicitNewExprNode);
BLangInvocation invocationNode = createInvocation(implicitNewExprNode, implicitNewExprNode.newKeyword());
initNode.argsExpr.addAll(invocationNode.argExprs);
initNode.initInvocation = invocationNode;
return initNode;
}
@Override
public BLangNode transform(ExplicitNewExpressionNode explicitNewExprNode) {
    // `new T(...)`: build the init node (carrying the explicit type) and the
    // synthetic `new` invocation.
    BLangTypeInit typeInit = createTypeInit(explicitNewExprNode);
    BLangInvocation newInvocation = createInvocation(explicitNewExprNode, explicitNewExprNode.newKeyword());
    typeInit.argsExpr.addAll(newInvocation.argExprs);
    typeInit.initInvocation = newInvocation;
    return typeInit;
}
private BLangTypeInit createTypeInit(NewExpressionNode expression) {
    // Common `new` handling; only an explicit-new carries a user-defined type.
    BLangTypeInit typeInit = (BLangTypeInit) TreeBuilder.createInitNode();
    typeInit.pos = getPosition(expression);
    if (expression.kind() == SyntaxKind.EXPLICIT_NEW_EXPRESSION) {
        Node typeDesc = ((ExplicitNewExpressionNode) expression).typeDescriptor();
        typeInit.userDefinedType = createTypeNode(typeDesc);
    }
    return typeInit;
}
private BLangInvocation createInvocation(NewExpressionNode expression, Token newKeyword) {
    // Builds the synthetic invocation representing the `new` call, including its args.
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = getPosition(expression);
    populateArgsInvocation(expression, invocation);
    BLangNameReference newRef = createBLangNameReference(newKeyword);
    invocation.name = (BLangIdentifier) newRef.name;
    invocation.pkgAlias = (BLangIdentifier) newRef.pkgAlias;
    return invocation;
}
private void populateArgsInvocation(NewExpressionNode expression, BLangInvocation invocationNode) {
    // Copies each `new(...)` argument into the invocation; no-op when there is
    // no argument list at all.
    Iterator<FunctionArgumentNode> argIterator = getArgumentNodesIterator(expression);
    if (argIterator == null) {
        return;
    }
    while (argIterator.hasNext()) {
        invocationNode.argExprs.add(createExpression(argIterator.next()));
    }
}
private Iterator<FunctionArgumentNode> getArgumentNodesIterator(NewExpressionNode expression) {
    // Returns the `new` call's argument iterator, or null when an implicit-new
    // has no parenthesized argument list.
    if (expression.kind() == SyntaxKind.IMPLICIT_NEW_EXPRESSION) {
        Optional<ParenthesizedArgList> argsList = ((ImplicitNewExpressionNode) expression).parenthesizedArgList();
        if (!argsList.isPresent()) {
            return null;
        }
        return argsList.get().arguments().iterator();
    }
    ParenthesizedArgList argList =
            (ParenthesizedArgList) ((ExplicitNewExpressionNode) expression).parenthesizedArgList();
    return argList.arguments().iterator();
}
@Override
public BLangIndexBasedAccess transform(IndexedExpressionNode indexedExpressionNode) {
    // Builds a `container[key]` access node.
    BLangIndexBasedAccess accessNode = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    accessNode.pos = getPosition(indexedExpressionNode);
    accessNode.indexExpr = createExpression(indexedExpressionNode.keyExpression());
    accessNode.expr = createExpression(indexedExpressionNode.containerExpression());
    return accessNode;
}
@Override
public BLangTypeConversionExpr transform(TypeCastExpressionNode typeCastExpressionNode) {
    // Builds `<T> expr`; guarded against a missing cast-param node.
    BLangTypeConversionExpr conversion = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversion.pos = getPosition(typeCastExpressionNode);
    if (typeCastExpressionNode.typeCastParam() != null) {
        conversion.typeNode = createTypeNode(typeCastExpressionNode.typeCastParam().type());
    }
    conversion.expr = createExpression(typeCastExpressionNode.expression());
    return conversion;
}
@Override
public BLangNode transform(Token token) {
    // Only literal-bearing tokens are expected to reach the transformer directly.
    SyntaxKind kind = token.kind();
    if (kind == SyntaxKind.XML_TEXT_CONTENT || kind == SyntaxKind.TEMPLATE_STRING) {
        return createSimpleLiteral(token);
    }
    throw new RuntimeException("Syntax kind is not supported: " + kind);
}
@Override
public BLangNode transform(InterpolationNode interpolationNode) {
    // `${expr}` inside a template simply yields its inner expression.
    return createExpression(interpolationNode.expression());
}
@Override
public BLangNode transform(TemplateExpressionNode expressionNode) {
    // Dispatches on the template flavour: XML templates delegate to their single
    // content node; string templates build a string-template literal.
    SyntaxKind kind = expressionNode.kind();
    if (kind == SyntaxKind.XML_TEMPLATE_EXPRESSION) {
        return expressionNode.content().get(0).apply(this);
    }
    if (kind == SyntaxKind.STRING_TEMPLATE_EXPRESSION) {
        return createStringTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
    }
    throw new RuntimeException("Syntax kind is not supported: " + kind);
}
@Override
public BLangNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {
    // Builds a table constructor: each row's mapping constructor becomes a
    // record literal, plus the optional key specifier.
    BLangTableConstructorExpr tableConstructorExpr =
            (BLangTableConstructorExpr) TreeBuilder.createTableConstructorExpressionNode();
    tableConstructorExpr.pos = getPosition(tableConstructorExpressionNode);
    for (Node node : tableConstructorExpressionNode.mappingConstructors()) {
        tableConstructorExpr.addRecordLiteral((BLangRecordLiteral) node.apply(this));
    }
    // NOTE(review): keySpecifier() is treated as nullable here — confirm the
    // syntax API contract (some sibling APIs return Optional instead).
    if (tableConstructorExpressionNode.keySpecifier() != null) {
        tableConstructorExpr.tableKeySpecifier =
                (BLangTableKeySpecifier) tableConstructorExpressionNode.keySpecifier().apply(this);
    }
    return tableConstructorExpr;
}
@Override
public BLangNode transform(TrapExpressionNode trapExpressionNode) {
    // Builds a `trap expr` node.
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    // Fix: position was never assigned, leaving this node without source
    // location info (every sibling transform sets pos).
    trapExpr.pos = getPosition(trapExpressionNode);
    trapExpr.expr = createExpression(trapExpressionNode.expression());
    return trapExpr;
}
private BLangCheckedExpr createCheckExpr(DiagnosticPos pos, BLangExpression expr) {
    // Wraps an expression in a `check` node at the given position.
    BLangCheckedExpr checked = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
    checked.pos = pos;
    checked.expr = expr;
    return checked;
}
private BLangCheckPanickedExpr createCheckPanickedExpr(DiagnosticPos pos, BLangExpression expr) {
    // Wraps an expression in a `checkpanic` node at the given position.
    BLangCheckPanickedExpr checkPanicked =
            (BLangCheckPanickedExpr) TreeBuilder.createCheckPanicExpressionNode();
    checkPanicked.pos = pos;
    checkPanicked.expr = expr;
    return checkPanicked;
}
@Override
public BLangNode transform(ReturnStatementNode returnStmtNode) {
    // A bare `return;` is normalized to `return ();` by synthesizing a nil literal.
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = getPosition(returnStmtNode);
    if (!returnStmtNode.expression().isPresent()) {
        BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        nilLiteral.pos = getPosition(returnStmtNode);
        nilLiteral.value = Names.NIL_VALUE;
        nilLiteral.type = symTable.nilType;
        returnNode.expr = nilLiteral;
    } else {
        returnNode.expr = createExpression(returnStmtNode.expression().get());
    }
    return returnNode;
}
@Override
public BLangNode transform(PanicStatementNode panicStmtNode) {
    // Builds a `panic expr;` statement node.
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = getPosition(panicStmtNode);
    panicNode.expr = createExpression(panicStmtNode.expression());
    return panicNode;
}
@Override
public BLangNode transform(ContinueStatementNode continueStmtNode) {
    // `continue;` carries only a position.
    BLangContinue continueNode = (BLangContinue) TreeBuilder.createContinueNode();
    continueNode.pos = getPosition(continueStmtNode);
    return continueNode;
}
@Override
public BLangNode transform(BreakStatementNode breakStmtNode) {
    // `break;` carries only a position.
    BLangBreak breakNode = (BLangBreak) TreeBuilder.createBreakNode();
    breakNode.pos = getPosition(breakStmtNode);
    return breakNode;
}
@Override
public BLangNode transform(AssignmentStatementNode assignmentStmtNode) {
    // Builds `lhs = rhs`, rejecting invocation results as assignment targets.
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    BLangExpression target = createExpression(assignmentStmtNode.varRef());
    validateLvexpr(target, DiagnosticCode.INVALID_INVOCATION_LVALUE_ASSIGNMENT);
    assignment.setExpression(createExpression(assignmentStmtNode.expression()));
    assignment.pos = getPosition(assignmentStmtNode);
    assignment.varRef = target;
    return assignment;
}
@Override
public BLangNode transform(CompoundAssignmentStatementNode compoundAssignmentStmtNode) {
    // Builds a compound assignment (e.g. `x += expr`); the operator token's
    // text is mapped to the matching OperatorKind.
    BLangCompoundAssignment bLCompAssignment = (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    bLCompAssignment.setExpression(createExpression(compoundAssignmentStmtNode.rhsExpression()));
    // The LHS must be a variable reference; the cast enforces that shape.
    bLCompAssignment.setVariable(
            (BLangVariableReference) createExpression(compoundAssignmentStmtNode.lhsExpression())
    );
    bLCompAssignment.pos = getPosition(compoundAssignmentStmtNode);
    // NOTE(review): unlike transform(AssignmentStatementNode), no validateLvexpr()
    // check is performed on the LHS here — confirm whether that is intentional.
    bLCompAssignment.opKind = OperatorKind.valueFrom(compoundAssignmentStmtNode.binaryOperator().text());
    return bLCompAssignment;
}
private void validateLvexpr(ExpressionNode lExprNode, DiagnosticCode errorCode) {
    // An invocation can never be an lvalue; field/index access expressions are
    // validated recursively on their base expression.
    if (lExprNode.getKind() == NodeKind.INVOCATION) {
        dlog.error(((BLangInvocation) lExprNode).pos, errorCode);
    }
    boolean isAccessExpr = lExprNode.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
            || lExprNode.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR;
    if (isAccessExpr) {
        validateLvexpr(((BLangAccessExpression) lExprNode).expr, errorCode);
    }
}
@Override
public BLangNode transform(WhileStatementNode whileStmtNode) {
    // Builds a while loop; the body block's position is re-pinned to the body node.
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.setCondition(createExpression(whileStmtNode.condition()));
    whileNode.pos = getPosition(whileStmtNode);
    BLangBlockStmt body = (BLangBlockStmt) whileStmtNode.whileBody().apply(this);
    body.pos = getPosition(whileStmtNode.whileBody());
    whileNode.setBody(body);
    return whileNode;
}
@Override
public BLangNode transform(IfElseStatementNode ifElseStmtNode) {
    // Builds the if node; the optional else branch may be a block or a chained
    // if-else, both handled by the visitor.
    BLangIf ifNode = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifNode.pos = getPosition(ifElseStmtNode);
    ifNode.setCondition(createExpression(ifElseStmtNode.condition()));
    ifNode.setBody((BLangBlockStmt) ifElseStmtNode.ifBody().apply(this));
    if (ifElseStmtNode.elseBody().isPresent()) {
        ElseBlockNode elseNode = (ElseBlockNode) ifElseStmtNode.elseBody().get();
        ifNode.setElseStatement(
                (org.ballerinalang.model.tree.statements.StatementNode) elseNode.elseBody().apply(this));
    }
    return ifNode;
}
@Override
public BLangNode transform(BlockStatementNode blockStatement) {
    // Transforms every child statement in order, skipping nulls defensively.
    BLangBlockStmt blockNode = (BLangBlockStmt) TreeBuilder.createBlockNode();
    List<BLangStatement> stmts = new ArrayList<>();
    for (StatementNode statement : blockStatement.statements()) {
        if (statement == null) {
            continue;
        }
        stmts.add((BLangStatement) statement.apply(this));
    }
    blockNode.stmts = stmts;
    return blockNode;
}
@Override
public BLangNode transform(VariableDeclarationNode varDeclaration) {
    // Local variable definition; the builder resolves type, initializer and final-ness.
    BLangSimpleVariableDef varDef = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    varDef.pos = getPosition(varDeclaration);
    BLangSimpleVariable variable = new SimpleVarBuilder()
            .with(varDeclaration.variableName().text(), getPosition(varDeclaration.variableName()))
            .setTypeByNode(varDeclaration.typeName())
            .setExpressionByNode(varDeclaration.initializer().orElse(null))
            .setFinal(varDeclaration.finalKeyword().isPresent())
            .build();
    varDef.setVariable(variable);
    return varDef;
}
@Override
public BLangNode transform(ExpressionStatementNode expressionStatement) {
    // Wraps a standalone expression in a statement node.
    BLangExpressionStmt exprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode();
    exprStmt.expr = (BLangExpression) expressionStatement.expression().apply(this);
    exprStmt.pos = getPosition(expressionStatement);
    return exprStmt;
}
@Override
public BLangNode transform(PositionalArgumentNode argumentNode) {
    // A positional argument is just its underlying expression.
    return createExpression(argumentNode.expression());
}
@Override
public BLangNode transform(NamedArgumentNode namedArgumentNode) {
    // Builds a `name = expr` argument node.
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.pos = getPosition(namedArgumentNode);
    namedArg.name = this.createIdentifier(getPosition(namedArgumentNode.argumentName()),
                                          namedArgumentNode.argumentName().name().text());
    namedArg.expr = createExpression(namedArgumentNode.expression());
    return namedArg;
}
@Override
public BLangNode transform(RestArgumentNode restArgumentNode) {
    // A `...expr` argument: transform the spread expression directly.
    return restArgumentNode.expression().apply(this);
}
@Override
public BLangNode transform(RequiredParameterNode requiredParameter) {
    // Required parameter; only the `public` visibility qualifier is meaningful here.
    BLangSimpleVariable param = createSimpleVar(requiredParameter.paramName(), requiredParameter.typeName());
    Optional<Token> visibility = requiredParameter.visibilityQualifier();
    if (visibility.isPresent() && visibility.get().kind() == SyntaxKind.PUBLIC_KEYWORD) {
        param.flagSet.add(Flag.PUBLIC);
    }
    param.pos = getPosition(requiredParameter);
    return param;
}
@Override
public BLangNode transform(DefaultableParameterNode defaultableParameter) {
    // Defaultable parameter: like a required parameter, plus the default-value
    // expression as the variable's initializer.
    BLangSimpleVariable param = createSimpleVar(defaultableParameter.paramName(), defaultableParameter.typeName());
    Optional<Token> visibility = defaultableParameter.visibilityQualifier();
    if (visibility.isPresent() && visibility.get().kind() == SyntaxKind.PUBLIC_KEYWORD) {
        param.flagSet.add(Flag.PUBLIC);
    }
    param.setInitialExpression(createExpression(defaultableParameter.expression()));
    param.pos = getPosition(defaultableParameter);
    return param;
}
@Override
public BLangNode transform(RestParameterNode restParameter) {
    // A rest parameter `T... x` is modelled as a one-dimensional array of T.
    BLangSimpleVariable restVar = createSimpleVar(restParameter.paramName(), restParameter.typeName());
    BLangArrayType arrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    arrayType.elemtype = restVar.typeNode;
    arrayType.dimensions = 1;
    arrayType.pos = getPosition(restParameter.typeName());
    restVar.typeNode = arrayType;
    restVar.pos = getPosition(restParameter);
    return restVar;
}
@Override
public BLangNode transform(OptionalTypeDescriptorNode optTypeDescriptor) {
    // `T?` desugars to the union `T|()`.
    BLangValueType nilType = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilType.pos = getPosition(optTypeDescriptor.questionMarkToken());
    nilType.typeKind = TypeKind.NIL;
    BLangUnionTypeNode unionType = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    unionType.memberTypeNodes.add(createTypeNode(optTypeDescriptor.typeDescriptor()));
    unionType.memberTypeNodes.add(nilType);
    unionType.pos = getPosition(optTypeDescriptor);
    return unionType;
}
@Override
public BLangNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) {
    // Parameterized built-in type (e.g. `map<T>`): wrap the base built-in ref
    // type in a constrained type carrying T.
    BLangValueType baseType = (BLangValueType) createBuiltInTypeNode(parameterizedTypeDescNode.parameterizedType());
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = baseType.typeKind;
    refType.pos = baseType.pos;
    BLangConstrainedType constrained = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
    constrained.type = refType;
    constrained.constraint = createTypeNode(parameterizedTypeDescNode.typeNode());
    constrained.pos = getPosition(parameterizedTypeDescNode);
    return constrained;
}
@Override
public BLangNode transform(KeySpecifierNode keySpecifierNode) {
    // `key(a, b, ...)` table key specifier: record each key field name.
    BLangTableKeySpecifier tableKeySpecifierNode =
            (BLangTableKeySpecifier) TreeBuilder.createTableKeySpecifierNode();
    tableKeySpecifierNode.pos = getPosition(keySpecifierNode);
    for (Node field : keySpecifierNode.fieldNames()) {
        // NOTE(review): field.toString() may include surrounding trivia —
        // confirm whether Token.text() should be used here instead.
        tableKeySpecifierNode.addFieldNameIdentifier(createIdentifier(getPosition(field), field.toString()));
    }
    return tableKeySpecifierNode;
}
@Override
public BLangNode transform(KeyTypeConstraintNode keyTypeConstraintNode) {
    // `key<T>` table key-type constraint.
    BLangTableKeyTypeConstraint constraint = new BLangTableKeyTypeConstraint();
    constraint.pos = getPosition(keyTypeConstraintNode);
    constraint.keyType = createTypeNode(keyTypeConstraintNode.typeParameterNode());
    return constraint;
}
@Override
public BLangNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) {
    // `table<RowType>` type descriptor with an optional key constraint.
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TreeUtils.stringToTypeKind(tableTypeDescriptorNode.tableKeywordToken().text());
    refType.pos = getPosition(tableTypeDescriptorNode);
    BLangTableTypeNode tableTypeNode = (BLangTableTypeNode) TreeBuilder.createTableTypeNode();
    tableTypeNode.pos = getPosition(tableTypeDescriptorNode);
    tableTypeNode.type = refType;
    tableTypeNode.constraint = createTypeNode(tableTypeDescriptorNode.rowTypeParameterNode());
    // The key constraint comes in two flavours: a key type constraint
    // (`key<T>`) or a key specifier (`key(a, b)`).
    if (tableTypeDescriptorNode.keyConstraintNode() != null) {
        Node constraintNode = tableTypeDescriptorNode.keyConstraintNode();
        if (constraintNode.kind() == SyntaxKind.KEY_TYPE_CONSTRAINT) {
            tableTypeNode.tableKeyTypeConstraint =
                    (BLangTableKeyTypeConstraint) constraintNode.apply(this);
        } else if (constraintNode.kind() == SyntaxKind.KEY_SPECIFIER) {
            tableTypeNode.tableKeySpecifier = (BLangTableKeySpecifier) constraintNode.apply(this);
        }
    }
    return tableTypeNode;
}
@Override
public BLangNode transform(SimpleNameReferenceNode simpleNameRefNode) {
    // Unqualified variable reference; the package alias is left empty.
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(simpleNameRefNode);
    varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    varRef.variableName = createIdentifier(getPosition(simpleNameRefNode.name()), simpleNameRefNode.name().text());
    return varRef;
}
@Override
public BLangNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
    // `module:name` reference: alias and name each keep their own position.
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(qualifiedNameReferenceNode);
    varRef.pkgAlias = createIdentifier(getPosition(qualifiedNameReferenceNode.modulePrefix()),
            qualifiedNameReferenceNode.modulePrefix().text());
    varRef.variableName = createIdentifier(getPosition(qualifiedNameReferenceNode.identifier()),
            qualifiedNameReferenceNode.identifier().text());
    return varRef;
}
@Override
public BLangNode transform(XMLProcessingInstruction xmlProcessingInstruction) {
    // Builds an XML processing-instruction literal (`<?target data?>`).
    BLangXMLProcInsLiteral xmlProcInsLiteral =
            (BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode();
    // Fix: position was never assigned, unlike the sibling XML transforms
    // (XMLComment, XMLElementNode, XMLTextNode all set pos).
    xmlProcInsLiteral.pos = getPosition(xmlProcessingInstruction);
    for (Node dataNode : xmlProcessingInstruction.data()) {
        xmlProcInsLiteral.dataFragments.add(createExpression(dataNode));
    }
    xmlProcInsLiteral.target = (BLangLiteral) xmlProcessingInstruction.target().apply(this);
    return xmlProcInsLiteral;
}
@Override
public BLangNode transform(XMLSimpleNameNode xmlSimpleNameNode) {
    // An XML simple name becomes a plain string literal.
    BLangLiteral nameLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nameLiteral.pos = getPosition(xmlSimpleNameNode);
    nameLiteral.type = symTable.getTypeFromTag(TypeTags.STRING);
    String nameText = xmlSimpleNameNode.name().text();
    nameLiteral.value = nameText;
    nameLiteral.originalValue = nameText;
    return nameLiteral;
}
@Override
public BLangNode transform(XMLComment xmlComment) {
    // `<!-- ... -->` literal; content fragments are transformed in order.
    BLangXMLCommentLiteral commentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode();
    for (Node contentNode : xmlComment.content()) {
        commentLiteral.textFragments.add((BLangExpression) contentNode.apply(this));
    }
    commentLiteral.pos = getPosition(xmlComment);
    return commentLiteral;
}
@Override
public BLangNode transform(XMLElementNode xmlElementNode) {
    // Builds an XML element literal from start tag, children and end tag.
    BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    xmlElement.startTagName = createExpression(xmlElementNode.startTag());
    xmlElement.endTagName = createExpression(xmlElementNode.endTag());
    for (Node node : xmlElementNode.content()) {
        // Plain text children bypass createExpression and become simple literals.
        if (node.kind() == SyntaxKind.XML_TEXT) {
            xmlElement.children.add(createSimpleLiteral(((XMLTextNode) node).content()));
            continue;
        }
        xmlElement.children.add(createExpression(node));
    }
    xmlElement.pos = getPosition(xmlElementNode);
    // NOTE(review): every element is flagged as a root here, including nested
    // ones — confirm whether nesting should clear this flag.
    xmlElement.isRoot = true;
    return xmlElement;
}
@Override
public BLangNode transform(XMLStartTagNode startTagNode) {
    // Start tags reuse the shared QName builder.
    return createStartEndXMLTag(startTagNode.name(), getPosition(startTagNode));
}
@Override
public BLangNode transform(XMLEndTagNode endTagNode) {
    // End tags reuse the shared QName builder.
    return createStartEndXMLTag(endTagNode.name(), getPosition(endTagNode));
}
@Override
public BLangNode transform(XMLTextNode xmlTextNode) {
    // XML character data becomes a text literal with a single fragment.
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.textFragments.add(0, (BLangExpression) xmlTextNode.content().apply(this));
    textLiteral.pos = getPosition(xmlTextNode);
    return textLiteral;
}
@Override
protected BLangNode transformSyntaxNode(Node node) {
    // Fallback for syntax kinds without a dedicated transform overload.
    throw new RuntimeException("Node not supported: " + node.getClass().getSimpleName());
}
private void populateFuncSignature(BLangFunction bLFunction, FunctionSignatureNode funcSignature) {
    // Attaches parameters (the rest parameter is registered separately) and the
    // return type; a missing return type descriptor defaults to nil.
    for (ParameterNode child : funcSignature.parameters()) {
        SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
        if (child instanceof RestParameterNode) {
            bLFunction.setRestParameter(param);
        } else {
            bLFunction.addParameter(param);
        }
    }
    Optional<ReturnTypeDescriptorNode> retDesc = funcSignature.returnTypeDesc();
    if (retDesc.isPresent()) {
        bLFunction.setReturnTypeNode(createTypeNode(retDesc.get().type()));
    } else {
        BLangValueType nilReturn = (BLangValueType) TreeBuilder.createValueTypeNode();
        nilReturn.pos = getPosition(funcSignature);
        nilReturn.typeKind = TypeKind.NIL;
        bLFunction.setReturnTypeNode(nilReturn);
    }
}
private BLangUnaryExpr createBLangUnaryExpr(DiagnosticPos pos, OperatorKind operatorKind, BLangExpression expr) {
    // Small factory for unary expressions.
    BLangUnaryExpr unaryExpr = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
    unaryExpr.pos = pos;
    unaryExpr.operator = operatorKind;
    unaryExpr.expr = expr;
    return unaryExpr;
}
private BLangXMLQName createStartEndXMLTag(XMLNameNode xmlNameNode, DiagnosticPos pos) {
    // Builds the qualified name used by both start and end XML tags. Qualified
    // names keep their prefix; simple names get an empty prefix.
    BLangXMLQName xmlName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    xmlName.pos = pos;
    SyntaxKind kind = xmlNameNode.kind();
    if (kind == SyntaxKind.XML_QUALIFIED_NAME) {
        XMLQualifiedNameNode qualifiedName = (XMLQualifiedNameNode) xmlNameNode;
        xmlName.localname = createIdentifier(getPosition(qualifiedName.name()), qualifiedName.name().toString());
        xmlName.prefix = createIdentifier(getPosition(qualifiedName.prefix()), qualifiedName.prefix().toString());
        return xmlName;
    }
    if (kind == SyntaxKind.XML_SIMPLE_NAME) {
        XMLSimpleNameNode simpleName = (XMLSimpleNameNode) xmlNameNode;
        xmlName.localname = createIdentifier(getPosition(simpleName.name()), simpleName.name().text());
        xmlName.prefix = createIdentifier(null, "");
        return xmlName;
    }
    throw new RuntimeException("Syntax kind is not supported: " + kind);
}
private BLangExpression createExpression(Node expression) {
    // Central expression dispatcher: literals and name references are built
    // inline; braced expressions get a group wrapper; everything else goes
    // through the visitor via apply(this).
    if (isSimpleLiteral(expression.kind())) {
        return createSimpleLiteral(expression);
    } else if (expression.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            expression.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
            expression.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
        // Built directly so that both the alias and the name keep their own positions.
        BLangNameReference nameReference = createBLangNameReference(expression);
        BLangSimpleVarRef bLVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
        bLVarRef.pos = getPosition(expression);
        bLVarRef.pkgAlias = this.createIdentifier((DiagnosticPos) nameReference.pkgAlias.getPosition(),
                nameReference.pkgAlias.getValue());
        bLVarRef.variableName = this.createIdentifier((DiagnosticPos) nameReference.name.getPosition(),
                nameReference.name.getValue());
        return bLVarRef;
    } else if (expression.kind() == SyntaxKind.BRACED_EXPRESSION) {
        // NOTE(review): the group expression's pos is never assigned — confirm
        // whether downstream consumers need it.
        BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        group.expression = (BLangExpression) expression.apply(this);
        return group;
    } else {
        return (BLangExpression) expression.apply(this);
    }
}
private BLangNode createStringTemplateLiteral(NodeList<TemplateMemberNode> memberNodes, DiagnosticPos pos) {
    // Collects each template member (text chunk or interpolation) in order.
    BLangStringTemplateLiteral templateLiteral =
            (BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
    for (Node member : memberNodes) {
        templateLiteral.exprs.add((BLangExpression) member.apply(this));
    }
    templateLiteral.pos = pos;
    return templateLiteral;
}
private BLangSimpleVariable createSimpleVar(Token name, Node type) {
    // Convenience overload: non-final, non-listener variable with no
    // initializer and no visibility qualifier.
    return createSimpleVar(name, type, null, false, false, null);
}
private BLangSimpleVariable createSimpleVar(Token name, Node typeName, Node initializer, boolean isFinal,
                                            boolean isListenerVar,
                                            Token visibilityQualifier) {
    // Overload taking the name as a token; delegates with the token's text and position.
    return createSimpleVar(name.text(), getPosition(name), typeName, initializer, isFinal, isListenerVar,
            visibilityQualifier);
}
private BLangSimpleVariable createSimpleVar(String name, DiagnosticPos pos, Node typeName, Node initializer,
                                            boolean isFinal,
                                            boolean isListenerVar,
                                            Token visibilityQualifier) {
    // Core simple-variable factory:
    // - a null or `var` type marks the variable as declared-with-var
    // - the visibility qualifier maps private/public keywords onto flags
    // - listener variables are implicitly final as well.
    BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    bLSimpleVar.setName(this.createIdentifier(pos, name));
    if (typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC) {
        bLSimpleVar.isDeclaredWithVar = true;
    } else {
        bLSimpleVar.setTypeNode(createTypeNode(typeName));
    }
    if (visibilityQualifier != null) {
        if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
            bLSimpleVar.flagSet.add(Flag.PRIVATE);
        } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            bLSimpleVar.flagSet.add(Flag.PUBLIC);
        }
    }
    if (isFinal) {
        markVariableAsFinal(bLSimpleVar);
    }
    if (initializer != null) {
        bLSimpleVar.setInitialExpression(createExpression(initializer));
    }
    if (isListenerVar) {
        bLSimpleVar.flagSet.add(Flag.LISTENER);
        bLSimpleVar.flagSet.add(Flag.FINAL);
    }
    return bLSimpleVar;
}
private BLangIdentifier createIdentifier(DiagnosticPos pos, String value) {
    // Convenience overload without whitespace info.
    return createIdentifier(pos, value, null);
}
private BLangIdentifier createIdentifier(DiagnosticPos pos, String value, Set<Whitespace> ws) {
    // Builds an identifier node. Quoted identifiers (identifier-literal prefix)
    // are validated, unescaped, and flagged as literal.
    BLangIdentifier bLIdentifer = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    if (value == null) {
        return bLIdentifer;
    }
    if (value.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
        // After escapes are resolved, only alphanumerics and dots are allowed.
        if (!escapeQuotedIdentifier(value).matches("^[0-9a-zA-Z.]*$")) {
            dlog.error(pos, DiagnosticCode.IDENTIFIER_LITERAL_ONLY_SUPPORTS_ALPHANUMERICS);
        }
        String unescapedValue = StringEscapeUtils.unescapeJava(value);
        // substring(1) drops the leading quote marker.
        // NOTE(review): this assumes the prefix is a single character — confirm
        // against IDENTIFIER_LITERAL_PREFIX.
        bLIdentifer.setValue(unescapedValue.substring(1));
        bLIdentifer.originalValue = value;
        bLIdentifer.setLiteral(true);
    } else {
        bLIdentifer.setValue(value);
        bLIdentifer.setLiteral(false);
    }
    bLIdentifer.pos = pos;
    if (ws != null) {
        bLIdentifer.addWS(ws);
    }
    return bLIdentifer;
}
private BLangLiteral createSimpleLiteral(Node literal) {
    // Converts a literal token/node into a BLangLiteral: decodes the raw token
    // text into a runtime value and selects the matching type tag.
    BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    SyntaxKind type = literal.kind();
    int typeTag = -1;
    Object value = null;
    String originalValue = null;
    String textValue;
    if (literal instanceof BasicLiteralNode) {
        textValue = ((BasicLiteralNode) literal).literalToken().text();
    } else if (literal instanceof Token) {
        textValue = ((Token) literal).text();
    } else {
        textValue = "";
    }
    if (type == SyntaxKind.DECIMAL_INTEGER_LITERAL || type == SyntaxKind.HEX_INTEGER_LITERAL) {
        typeTag = TypeTags.INT;
        value = getIntegerLiteral(literal, textValue);
        originalValue = textValue;
        bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
    } else if (type == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL) {
        // A decimal discriminator in the text selects DECIMAL over FLOAT.
        typeTag = NumericLiteralSupport.isDecimalDiscriminated(textValue) ? TypeTags.DECIMAL : TypeTags.FLOAT;
        value = textValue;
        originalValue = textValue;
        bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
    } else if (type == SyntaxKind.HEX_FLOATING_POINT_LITERAL) {
        typeTag = TypeTags.FLOAT;
        value = getHexNodeValue(textValue);
        originalValue = textValue;
        bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
    } else if (type == SyntaxKind.TRUE_KEYWORD || type == SyntaxKind.FALSE_KEYWORD) {
        typeTag = TypeTags.BOOLEAN;
        value = Boolean.parseBoolean(textValue);
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.STRING_LITERAL || type == SyntaxKind.XML_TEXT_CONTENT ||
            type == SyntaxKind.TEMPLATE_STRING) {
        String text = textValue;
        if (type == SyntaxKind.STRING_LITERAL) {
            // Strip the surrounding quotes.
            text = text.substring(1, text.length() - 1);
        }
        String originalText = text;
        // Rewrite unicode escape sequences into \\uXXXX form, reporting escapes
        // whose code point falls in the disallowed ranges.
        Matcher matcher = UNICODE_PATTERN.matcher(text);
        int position = 0;
        while (matcher.find(position)) {
            String hexStringVal = matcher.group(1);
            int hexDecimalVal = Integer.parseInt(hexStringVal, 16);
            if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE)
                    || hexDecimalVal > Constants.MAX_UNICODE) {
                String hexStringWithBraces = matcher.group(0);
                // Point the diagnostic at the escape itself within the literal.
                int offset = originalText.indexOf(hexStringWithBraces) + 1;
                DiagnosticPos pos = getPosition(literal);
                dlog.error(new DiagnosticPos(this.diagnosticSource, pos.sLine, pos.eLine, pos.sCol + offset,
                                pos.sCol + offset + hexStringWithBraces.length()),
                        DiagnosticCode.INVALID_UNICODE, hexStringWithBraces);
            }
            text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal));
            position = matcher.end() - 2;
            matcher = UNICODE_PATTERN.matcher(text);
        }
        text = StringEscapeUtils.unescapeJava(text);
        typeTag = TypeTags.STRING;
        value = text;
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.NULL_KEYWORD) {
        typeTag = TypeTags.NIL;
        value = null;
        originalValue = "null";
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.NIL_LITERAL) {
        typeTag = TypeTags.NIL;
        value = null;
        originalValue = "()";
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.BINARY_EXPRESSION) {
        typeTag = TypeTags.BYTE_ARRAY;
        value = textValue;
        originalValue = textValue;
        // NOTE(review): isNumericLiteral(BINARY_EXPRESSION) appears unreachable
        // as true in this branch — confirm whether the numeric case is needed.
        if (isNumericLiteral(type)) {
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        } else {
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        }
    }
    // NOTE(review): when no branch matched, typeTag is still -1 when passed to
    // symTable.getTypeFromTag — confirm how unmatched kinds are handled.
    bLiteral.pos = getPosition(literal);
    bLiteral.type = symTable.getTypeFromTag(typeTag);
    bLiteral.type.tag = typeTag;
    bLiteral.value = value;
    bLiteral.originalValue = originalValue;
    return bLiteral;
}
private BLangType createTypeNode(Node type) {
    // Central type dispatcher:
    // - built-in simple names and `()` -> value/ref type nodes
    // - qualified names / identifier tokens -> user-defined types
    // - simple name references unwrap to their identifier
    // - indexed expressions are parsed array types
    // - anything else goes through the visitor.
    if (type instanceof BuiltinSimpleNameReferenceNode || type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        return createBuiltInTypeNode(type);
    } else if (type.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE || type.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
        BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        BLangNameReference nameReference = createBLangNameReference(type);
        bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
        bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
        bLUserDefinedType.pos = getPosition(type);
        return bLUserDefinedType;
    } else if (type.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        SimpleNameReferenceNode nameReferenceNode = (SimpleNameReferenceNode) type;
        return createTypeNode(nameReferenceNode.name());
    } else if (type.kind() == SyntaxKind.INDEXED_EXPRESSION) {
        return createBLangArrayType((IndexedExpressionNode) type);
    } else {
        return (BLangType) type.apply(this);
    }
}
private BLangArrayType createBLangArrayType(IndexedExpressionNode indexedExpressionNode) {
    // Unwraps nested indexed expressions into a single array type node,
    // collecting one size entry per dimension.
    int dimensions = 1;
    List<Integer> sizes = new ArrayList<>();
    while (true) {
        Node keyExpr = indexedExpressionNode.keyExpression();
        if (keyExpr == null) {
            // No size expression: open (unsealed) dimension.
            sizes.add(UNSEALED_ARRAY_INDICATOR);
        } else {
            if (keyExpr.kind() == SyntaxKind.DECIMAL_INTEGER_LITERAL) {
                sizes.add(Integer.parseInt(keyExpr.toString()));
            } else if (keyExpr.kind() == SyntaxKind.ASTERISK_TOKEN) {
                // `*` size marker.
                sizes.add(OPEN_SEALED_ARRAY_INDICATOR);
            } else {
                // NOTE(review): any other size expression is silently dropped,
                // which leaves sizes shorter than dimensions — confirm whether
                // an error should be reported here instead.
            }
        }
        if (indexedExpressionNode.containerExpression().kind() != SyntaxKind.INDEXED_EXPRESSION) {
            break;
        }
        indexedExpressionNode = (IndexedExpressionNode) indexedExpressionNode.containerExpression();
        dimensions++;
    }
    BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    arrayTypeNode.pos = getPosition(indexedExpressionNode);
    arrayTypeNode.elemtype = createTypeNode(indexedExpressionNode.containerExpression());
    arrayTypeNode.dimensions = dimensions;
    arrayTypeNode.sizes = sizes.stream().mapToInt(val -> val).toArray();
    return arrayTypeNode;
}
private BLangType createBuiltInTypeNode(Node type) {
    // Resolves the textual type name, then maps it to a TypeKind. JSON is the
    // one built-in that needs a reference-type node instead of a value-type node.
    String typeText;
    if (type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        typeText = "()";
    } else if (type instanceof BuiltinSimpleNameReferenceNode) {
        typeText = ((BuiltinSimpleNameReferenceNode) type).name().text();
    } else {
        typeText = ((Token) type).text();
    }
    TypeKind typeKind = TreeUtils.stringToTypeKind(typeText.replaceAll("\\s+", ""));
    if (typeKind == TypeKind.JSON) {
        BLangBuiltInRefTypeNode refTypeNode =
                (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
        refTypeNode.typeKind = typeKind;
        refTypeNode.pos = getPosition(type);
        return refTypeNode;
    }
    BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    valueTypeNode.typeKind = typeKind;
    valueTypeNode.pos = getPosition(type);
    return valueTypeNode;
}
// Builds a simple variable node carrying a name and optional initializer,
// but no type descriptor.
private VariableNode createBasicVarNodeWithoutType(DiagnosticPos pos, Set<Whitespace> ws, String identifier,
DiagnosticPos identifierPos, ExpressionNode expr) {
BLangSimpleVariable variable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
variable.pos = pos;
variable.addWS(ws);
IdentifierNode varName = this.createIdentifier(identifierPos, identifier, ws);
((BLangIdentifier) varName).pos = identifierPos;
variable.setName(varName);
if (expr != null) {
variable.setInitialExpression(expr);
}
return variable;
}
// Creates a function-invocation node for the given name and argument list.
private BLangInvocation createBLangInvocation(Node nameNode, NodeList<FunctionArgumentNode> arguments,
DiagnosticPos position) {
BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
BLangNameReference nameReference = createBLangNameReference(nameNode);
invocation.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
invocation.name = (BLangIdentifier) nameReference.name;
// Transform every argument node into its BLang expression counterpart.
List<BLangExpression> argExprs = new ArrayList<>();
arguments.iterator().forEachRemaining(argument -> argExprs.add((BLangExpression) argument.apply(this)));
invocation.argExprs = argExprs;
invocation.pos = position;
return invocation;
}
// Resolves a name-reference syntax node into a (pkgAlias, name) pair with
// source positions attached.
private BLangNameReference createBLangNameReference(Node node) {
switch (node.kind()) {
case QUALIFIED_NAME_REFERENCE: {
// module-prefix:identifier
QualifiedNameReferenceNode qualifiedName = (QualifiedNameReferenceNode) node;
Token modulePrefix = qualifiedName.modulePrefix();
IdentifierToken identifier = qualifiedName.identifier();
BLangIdentifier pkgAlias = this.createIdentifier(getPosition(modulePrefix), modulePrefix.text());
BLangIdentifier name = this.createIdentifier(getPosition(identifier), identifier.text());
return new BLangNameReference(getPosition(node), null, pkgAlias, name);
}
case IDENTIFIER_TOKEN:
case ERROR_KEYWORD: {
// Unqualified reference: empty, position-less package alias.
Token token = (Token) node;
BLangIdentifier pkgAlias = this.createIdentifier(null, "");
BLangIdentifier name = this.createIdentifier(getPosition(token), token.text());
return new BLangNameReference(getPosition(node), null, pkgAlias, name);
}
case NEW_KEYWORD: {
// 'new' keyword: empty alias positioned at the keyword itself.
Token token = (Token) node;
BLangIdentifier pkgAlias = this.createIdentifier(getPosition(token), "");
BLangIdentifier name = this.createIdentifier(getPosition(token), token.text());
return new BLangNameReference(getPosition(node), null, pkgAlias, name);
}
default: {
// Simple name reference: recurse on the wrapped name token.
SimpleNameReferenceNode simpleName = (SimpleNameReferenceNode) node;
return createBLangNameReference(simpleName.name());
}
}
}
// Parses a decimal or hexadecimal integer literal token; returns a Long on
// success, the raw text when out of range (see parseLong), or null for any
// other literal kind.
private Object getIntegerLiteral(Node literal, String nodeValue) {
switch (literal.kind()) {
case DECIMAL_INTEGER_LITERAL:
return parseLong(literal, nodeValue, nodeValue, 10, DiagnosticCode.INTEGER_TOO_SMALL,
DiagnosticCode.INTEGER_TOO_LARGE);
case HEX_INTEGER_LITERAL: {
// Drop the 0x/0X prefix before parsing the hex digits.
String hexDigits = nodeValue.toLowerCase().replace("0x", "");
return parseLong(literal, nodeValue, hexDigits, 16,
DiagnosticCode.HEXADECIMAL_TOO_SMALL, DiagnosticCode.HEXADECIMAL_TOO_LARGE);
}
default:
return null;
}
}
/**
 * Parses {@code processedNodeValue} as a signed long in the given radix.
 * On success returns the boxed {@code Long}; on failure logs a too-small
 * diagnostic for negative literals (or too-large otherwise) and returns the
 * original literal text so downstream code still has a value to carry.
 *
 * @param literal the literal node, used only for its source position
 * @param originalNodeValue the literal text as written in source
 * @param processedNodeValue the normalized digits handed to the parser
 * @param radix numeric base (10 or 16)
 * @param code1 diagnostic for negative out-of-range values
 * @param code2 diagnostic for positive out-of-range values
 */
private Object parseLong(Node literal, String originalNodeValue,
String processedNodeValue, int radix,
DiagnosticCode code1, DiagnosticCode code2) {
try {
return Long.parseLong(processedNodeValue, radix);
} catch (NumberFormatException e) {
// Long.parseLong only throws NumberFormatException; catching the broad
// Exception (as before) would have masked unrelated programming errors.
DiagnosticPos pos = getPosition(literal);
if (originalNodeValue.startsWith("-")) {
dlog.error(pos, code1, originalNodeValue);
} else {
dlog.error(pos, code2, originalNodeValue);
}
}
return originalNodeValue;
}
// Ensures a hex floating-point literal carries an exponent part; "p0" is
// appended when neither 'p' nor 'P' is present.
private String getHexNodeValue(String value) {
boolean hasExponent = value.contains("p") || value.contains("P");
return hasExponent ? value : value + "p0";
}
// Left-pads the given string with '0' characters up to a width of four.
private String fillWithZeros(String str) {
StringBuilder padded = new StringBuilder(str);
while (padded.length() < 4) {
padded.insert(0, '0');
}
return padded.toString();
}
// Recursively marks a variable -- and every variable bound inside it -- as final.
private void markVariableAsFinal(BLangVariable variable) {
variable.flagSet.add(Flag.FINAL);
switch (variable.getKind()) {
case TUPLE_VARIABLE:
// Mark each tuple member, plus the rest binding when present.
BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
tupleVariable.memberVariables.forEach(member -> markVariableAsFinal(member));
if (tupleVariable.restVariable != null) {
markVariableAsFinal(tupleVariable.restVariable);
}
break;
case RECORD_VARIABLE:
// Mark the value side of every key/value binding, plus the rest parameter.
BLangRecordVariable recordVariable = (BLangRecordVariable) variable;
recordVariable.variableList.forEach(keyValue -> markVariableAsFinal(keyValue.getValue()));
if (recordVariable.restParam != null) {
markVariableAsFinal((BLangVariable) recordVariable.restParam);
}
break;
case ERROR_VARIABLE:
// Mark the reason, every detail binding, and the rest detail.
BLangErrorVariable errorVariable = (BLangErrorVariable) variable;
markVariableAsFinal(errorVariable.reason);
errorVariable.detail.forEach(entry -> markVariableAsFinal(entry.valueBindingPattern));
if (errorVariable.restDetail != null) {
markVariableAsFinal(errorVariable.restDetail);
}
break;
}
}
// True for token kinds that map directly onto a simple BLang literal
// (all numeric literals plus string, boolean and nil/null).
private boolean isSimpleLiteral(SyntaxKind syntaxKind) {
if (isNumericLiteral(syntaxKind)) {
return true;
}
return syntaxKind == SyntaxKind.STRING_LITERAL
|| syntaxKind == SyntaxKind.TRUE_KEYWORD
|| syntaxKind == SyntaxKind.FALSE_KEYWORD
|| syntaxKind == SyntaxKind.NIL_LITERAL
|| syntaxKind == SyntaxKind.NULL_KEYWORD;
}
// True for the four numeric literal token kinds (decimal/hex, int/float).
private boolean isNumericLiteral(SyntaxKind syntaxKind) {
return syntaxKind == SyntaxKind.DECIMAL_INTEGER_LITERAL
|| syntaxKind == SyntaxKind.HEX_INTEGER_LITERAL
|| syntaxKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL
|| syntaxKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL;
}
// Strips the quoted-identifier prefix after unescaping Java escape sequences;
// identifiers without the prefix are returned unchanged.
private static String escapeQuotedIdentifier(String identifier) {
if (!identifier.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
return identifier;
}
return StringEscapeUtils.unescapeJava(identifier).substring(1);
}
// A node is "present" unless the parser produced a NONE placeholder for it.
private boolean isPresent(Node node) {
return !(node.kind() == SyntaxKind.NONE);
}
// Combines two type nodes into a union. When either side is already a union
// it is reused and flattened (the RHS union takes precedence, with the LHS
// prepended to keep member order); otherwise a fresh union node is created.
private BLangUnionTypeNode addUnionType(BLangType lhsTypeNode, BLangType rhsTypeNode, DiagnosticPos position) {
if (rhsTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
BLangUnionTypeNode union = (BLangUnionTypeNode) rhsTypeNode;
union.memberTypeNodes.add(0, lhsTypeNode);
union.pos = position;
return union;
}
if (lhsTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
BLangUnionTypeNode union = (BLangUnionTypeNode) lhsTypeNode;
union.memberTypeNodes.add(rhsTypeNode);
union.pos = position;
return union;
}
BLangUnionTypeNode union = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
union.memberTypeNodes.add(lhsTypeNode);
union.memberTypeNodes.add(rhsTypeNode);
union.pos = position;
return union;
}
/**
 * Fluent builder for {@link BLangSimpleVariable} nodes: collects the name,
 * type, flags and initializer, then materializes the variable via build().
 */
private class SimpleVarBuilder {
private BLangIdentifier name;
private BLangType type;
private boolean isDeclaredWithVar;
private Set<Flag> flags = new HashSet<>();
private boolean isFinal;
private ExpressionNode expr;
// Materializes the variable node from the collected state.
public BLangSimpleVariable build() {
BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
bLSimpleVar.setName(this.name);
// Set the type exactly once (a previous revision redundantly invoked
// setTypeNode twice with the same value).
bLSimpleVar.setTypeNode(this.type);
bLSimpleVar.isDeclaredWithVar = this.isDeclaredWithVar;
bLSimpleVar.flagSet.addAll(this.flags);
if (this.isFinal) {
markVariableAsFinal(bLSimpleVar);
}
bLSimpleVar.setInitialExpression(this.expr);
return bLSimpleVar;
}
public SimpleVarBuilder with(String name) {
this.name = createIdentifier(null, name);
return this;
}
public SimpleVarBuilder with(String name, DiagnosticPos identifierPos) {
this.name = createIdentifier(identifierPos, name);
return this;
}
// Records the declared type; a missing type or 'var' switches to inference.
public SimpleVarBuilder setTypeByNode(Node typeName) {
if (typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC) {
this.isDeclaredWithVar = true;
}
this.type = createTypeNode(typeName);
return this;
}
public SimpleVarBuilder setExpressionByNode(Node initExprNode) {
this.expr = initExprNode != null ? createExpression(initExprNode) : null;
return this;
}
public SimpleVarBuilder setExpression(ExpressionNode expression) {
this.expr = expression;
return this;
}
public SimpleVarBuilder isDeclaredWithVar() {
this.isDeclaredWithVar = true;
return this;
}
public SimpleVarBuilder isFinal() {
this.isFinal = true;
return this;
}
// Listener variables are implicitly final.
public SimpleVarBuilder isListenerVar() {
this.flags.add(Flag.LISTENER);
this.flags.add(Flag.FINAL);
return this;
}
public SimpleVarBuilder setVisibility(Token visibilityQualifier) {
if (visibilityQualifier != null) {
if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
this.flags.add(Flag.PRIVATE);
} else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
this.flags.add(Flag.PUBLIC);
}
}
return this;
}
public SimpleVarBuilder setFinal(boolean present) {
this.isFinal = present;
return this;
}
// NOTE(review): "optional" currently toggles Flag.PUBLIC; this looks like it
// was meant to be Flag.OPTIONAL -- confirm against callers before changing.
public SimpleVarBuilder setOptional(boolean present) {
if (present) {
this.flags.add(Flag.PUBLIC);
} else {
this.flags.remove(Flag.PUBLIC);
}
return this;
}
public SimpleVarBuilder setRequired(boolean present) {
if (present) {
this.flags.add(Flag.REQUIRED);
} else {
this.flags.remove(Flag.REQUIRED);
}
return this;
}
public SimpleVarBuilder isPublic() {
this.flags.add(Flag.PUBLIC);
return this;
}
public SimpleVarBuilder isWorkerVar() {
this.flags.add(Flag.WORKER);
return this;
}
}
} |
> sessionTokenApplicable: In Java V4 we also skip applying the session token when eventual consistency is explicitly requested at the request level for data-plane operations. This is not included in V2 or the .NET SDK. If the consistency is eventual, there is no need to set a session token; this is intentional. Does this PR change this behaviour? | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
// A session token applies when the request explicitly asks for SESSION
// consistency, or the client default is SESSION -- except for read-only
// document requests that explicitly downgrade to EVENTUAL consistency.
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
// A caller-supplied token is kept only when applicable; master (metadata)
// operations never carry a session token.
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
// No token supplied: resolve one from the session container, if any.
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) { | private void applySessionToken(RxDocumentServiceRequest request) {
Map<String, String> headers = request.getHeaders();
Objects.requireNonNull(headers, "RxDocumentServiceRequest::headers is required and cannot be null");
String requestConsistencyLevel = headers.get(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL);
// A session token applies when the request explicitly asks for SESSION
// consistency, or the client default is SESSION -- except for read-only
// document requests that explicitly downgrade to EVENTUAL consistency.
boolean sessionTokenApplicable =
Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.SESSION.toString()) ||
(this.defaultConsistencyLevel == ConsistencyLevel.SESSION &&
(!request.isReadOnlyRequest() ||
request.getResourceType() != ResourceType.Document ||
!Strings.areEqual(requestConsistencyLevel, ConsistencyLevel.EVENTUAL.toString())));
// A caller-supplied token is kept only when applicable; master (metadata)
// operations never carry a session token.
if (!Strings.isNullOrEmpty(request.getHeaders().get(HttpConstants.HttpHeaders.SESSION_TOKEN))) {
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
request.getHeaders().remove(HttpConstants.HttpHeaders.SESSION_TOKEN);
}
return;
}
if (!sessionTokenApplicable || isMasterOperation(request.getResourceType(), request.getOperationType())) {
return;
}
// No token supplied: resolve one from the session container, if any.
String sessionToken = this.sessionContainer.resolveGlobalSessionToken(request);
if (!Strings.isNullOrEmpty(sessionToken)) {
headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, sessionToken);
}
} | class RxGatewayStoreModel implements RxStoreModel {
// Reused empty payload for responses that carry no body.
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
// Headers applied to every outgoing request unless overridden per request.
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
// Wires the gateway store model with its collaborators and seeds the default
// headers (cache control, API version, user agent and consistency level).
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
// Fall back to a default user agent when none is supplied.
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
// Only advertise a consistency-level header when one was configured.
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
// Thin per-operation wrappers mapping each document-service operation onto
// its HTTP verb (create/upsert/execute -> POST, replace -> PUT, etc.).
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
// Issues a query (or query-plan) request over POST, tagging query requests
// and selecting the content type by compatibility mode.
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
// Query-plan requests are not flagged as queries.
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
 * Builds and sends the HTTP request for the given document-service request.
 * The HTTP call is only issued once the returned publisher is subscribed to,
 * and it emits exactly one {@link RxDocumentServiceResponse}.
 *
 * @param request the document service request to transmit
 * @param method the HTTP method to use
 * @return Mono<RxDocumentServiceResponse>
 */
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
// Query-plan and address-refresh calls use their own configured timeouts.
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
// Surface synchronous failures through the reactive error channel.
return Mono.error(e);
}
}
/**
 * Merges the client's default headers with the per-request headers. Request
 * headers win on conflict, and null header values are sent as empty strings.
 *
 * @param headers the per-request headers; may be null
 * @return the combined header set to put on the wire
 */
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
// BUGFIX: 'headers' was previously dereferenced (containsKey) before the
// null check below, which would have thrown NPE for a null map.
if (headers == null) {
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
return httpHeaders;
}
// Defaults first, but never shadow an explicitly supplied request header.
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
for (Entry<String, String> entry : headers.entrySet()) {
// Preserve prior behavior: null values are sent as empty strings.
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
return httpHeaders;
}
// Resolves the absolute HTTPS URI for the request: an explicit endpoint
// override wins; otherwise the global endpoint manager selects the endpoint
// (first write endpoint for media requests).
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
// Media requests are routed to a write region.
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
// Database-account requests target the service root.
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
// Returns the path guaranteed to start with a leading '/'; a null path is
// passed through unchanged.
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
return path.startsWith("/") ? path : "/" + path;
}
/**
 * Transforms the reactor-netty HTTP response publisher into a
 * {@link RxDocumentServiceResponse} publisher.
 *
 * The HTTP invocation happens only once the customer code subscribes to the
 * publisher returned by the CRUD APIs and the subscription reaches the
 * underlying reactor-netty source.
 *
 * @param httpResponseMono the pending HTTP response
 * @param request the originating document service request
 * @return {@link Mono}
 */
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
RxDocumentServiceRequest request) {
return httpResponseMono.flatMap(httpResponse -> {
HttpHeaders httpResponseHeaders = httpResponse.headers();
int httpResponseStatus = httpResponse.statusCode();
// Normalize a missing body to a zero-length byte array.
Mono<byte[]> contentObservable = httpResponse
.bodyAsByteArray()
.switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
return contentObservable
.map(content -> {
ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
if (reactorNettyRequestRecord != null) {
reactorNettyRequestRecord.setTimeCompleted(Instant.now());
BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
reactorNettyRequestRecord.takeTimelineSnapshot());
}
// Throws a CosmosException for gateway error status codes.
validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
StoreResponse rsp = new StoreResponse(httpResponseStatus,
HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
content);
// BUGFIX: the timeline snapshot was previously taken unconditionally,
// throwing NPE whenever reactorNettyRequestRecord was null; guard it
// like the diagnostics update above.
if (reactorNettyRequestRecord != null) {
DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
}
return rsp;
})
.single();
}).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
.onErrorResume(throwable -> {
// Unwrap reactor composite/checked wrappers before classifying.
Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
if (!(unwrappedException instanceof Exception)) {
logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
return Mono.error(unwrappedException);
}
Exception exception = (Exception) unwrappedException;
CosmosException dce;
if (!(exception instanceof CosmosException)) {
// Non-service failure (e.g. connectivity): wrap as a CosmosException.
logger.error("Network failure", exception);
dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
} else {
dce = (CosmosException) exception;
}
if (WebExceptionUtility.isNetworkFailure(dce)) {
// Distinguish read timeouts from generic endpoint unavailability.
if (WebExceptionUtility.isReadTimeoutException(dce)) {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
} else {
BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
}
}
if (request.requestContext.cosmosDiagnostics != null) {
BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
}
return Mono.error(dce);
});
}
// Converts gateway error responses (status >= the error threshold) into a
// CosmosException built from the response body and headers; success statuses
// pass through silently.
private void validateOrThrow(RxDocumentServiceRequest request,
HttpResponseStatus status,
HttpHeaders headers,
byte[] bodyAsBytes) {
int statusCode = status.code();
if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
String statusCodeString = status.reasonPhrase() != null
? status.reasonPhrase().replace(" ", "")
: "";
// NOTE(review): new String(byte[]) uses the platform default charset --
// confirm the gateway always responds in a charset this decodes correctly.
String body = bodyAsBytes != null ? new String(bodyAsBytes) : null;
CosmosError cosmosError;
cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
// Re-wrap so the message always carries the status code.
cosmosError = new CosmosError(statusCodeString,
String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
cosmosError.getPartitionedQueryExecutionInfo());
CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
BridgeInternal.setRequestHeaders(dce, request.getHeaders());
throw dce;
}
}
// Dispatches the request to the HTTP handler matching its operation type.
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
// Fixed the garbled message (was "Unknown operation setType").
throw new IllegalStateException("Unknown operation type " + request.getOperationType());
}
}
// Executes the request with retries governed by WebExceptionRetryPolicy.
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
Callable<Mono<RxDocumentServiceResponse>> funcDelegate = () -> invokeAsyncInternal(request).single();
return BackoffRetryUtility.executeRetry(funcDelegate, new WebExceptionRetryPolicy());
}
// Entry point for the gateway store model: applies the session token, sends
// the request, and records the server-returned session token on completion.
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
// For data-plane resources, still capture the session token on 412,
// 409, and 404s other than READ_SESSION_NOT_AVAILABLE.
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
// Success: record the session token returned by the server.
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
// Updates the session container from the response headers; deleting a
// collection clears all tokens tracked for that collection instead.
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
if (request.getResourceType() == ResourceType.DocumentCollection &&
request.getOperationType() == OperationType.Delete) {
String resourceId;
// Name-based requests learn the owning resource id from the response.
if (request.getIsNameBased()) {
resourceId = responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID);
} else {
resourceId = request.getResourceId();
}
this.sessionContainer.clearTokenByResourceId(resourceId);
} else {
this.sessionContainer.setSessionToken(request, responseHeaders);
}
}
// Master (metadata) operations never carry session tokens and are routed
// differently from document (data-plane) operations; query-plan requests
// always count as master operations.
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
if (ReplicatedResourceClientUtils.isMasterResource(resourceType)) {
return true;
}
if (operationType == OperationType.QueryPlan) {
return true;
}
return isStoredProcedureMasterOperation(resourceType, operationType);
}
// CRUD on stored procedures is a master operation; executing one is data-plane.
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
if (resourceType != ResourceType.StoredProcedure) {
return false;
}
return operationType != OperationType.ExecuteJavaScript;
}
} | class RxGatewayStoreModel implements RxStoreModel {
// Reused empty payload for responses that carry no body.
private final static byte[] EMPTY_BYTE_ARRAY = {};
private final DiagnosticsClientContext clientContext;
private final Logger logger = LoggerFactory.getLogger(RxGatewayStoreModel.class);
// Headers applied to every outgoing request unless overridden per request.
private final Map<String, String> defaultHeaders;
private final HttpClient httpClient;
private final QueryCompatibilityMode queryCompatibilityMode;
private final GlobalEndpointManager globalEndpointManager;
private ConsistencyLevel defaultConsistencyLevel;
private ISessionContainer sessionContainer;
// Wires the gateway store model with its collaborators and seeds the default
// headers (cache control, API version, user agent and consistency level).
public RxGatewayStoreModel(
DiagnosticsClientContext clientContext,
ISessionContainer sessionContainer,
ConsistencyLevel defaultConsistencyLevel,
QueryCompatibilityMode queryCompatibilityMode,
UserAgentContainer userAgentContainer,
GlobalEndpointManager globalEndpointManager,
HttpClient httpClient) {
this.clientContext = clientContext;
this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL,
"no-cache");
this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION,
HttpConstants.Versions.CURRENT_VERSION);
// Fall back to a default user agent when none is supplied.
if (userAgentContainer == null) {
userAgentContainer = new UserAgentContainer();
}
this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgentContainer.getUserAgent());
// Only advertise a consistency-level header when one was configured.
if (defaultConsistencyLevel != null) {
this.defaultHeaders.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL,
defaultConsistencyLevel.toString());
}
this.defaultConsistencyLevel = defaultConsistencyLevel;
this.globalEndpointManager = globalEndpointManager;
this.queryCompatibilityMode = queryCompatibilityMode;
this.httpClient = httpClient;
this.sessionContainer = sessionContainer;
}
// Thin per-operation wrappers mapping each document-service operation onto
// its HTTP verb (create/upsert/execute -> POST, replace -> PUT, etc.).
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PATCH);
}
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.PUT);
}
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.DELETE);
}
private Mono<RxDocumentServiceResponse> execute(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.POST);
}
private Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
return this.performRequest(request, HttpMethod.GET);
}
// Issues a query (or query-plan) request over POST, tagging query requests
// and selecting the content type by compatibility mode.
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
// Query-plan requests are not flagged as queries.
if(request.getOperationType() != OperationType.QueryPlan) {
request.getHeaders().put(HttpConstants.HttpHeaders.IS_QUERY, "true");
}
switch (this.queryCompatibilityMode) {
case SqlQuery:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.SQL);
break;
case Default:
case Query:
default:
request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE,
RuntimeConstants.MediaTypes.QUERY_JSON);
break;
}
return this.performRequest(request, HttpMethod.POST);
}
/**
 * Builds and sends the HTTP request for the given document-service request.
 * The HTTP call is only issued once the returned publisher is subscribed to,
 * and it emits exactly one {@link RxDocumentServiceResponse}.
 *
 * @param request the document service request to transmit
 * @param method the HTTP method to use
 * @return Mono<RxDocumentServiceResponse>
 */
public Mono<RxDocumentServiceResponse> performRequest(RxDocumentServiceRequest request, HttpMethod method) {
try {
if (request.requestContext.cosmosDiagnostics == null) {
request.requestContext.cosmosDiagnostics = clientContext.createDiagnostics();
}
URI uri = getUri(request);
request.requestContext.resourcePhysicalAddress = uri.toString();
HttpHeaders httpHeaders = this.getHttpRequestHeaders(request.getHeaders());
Flux<byte[]> contentAsByteArray = request.getContentAsByteArrayFlux();
HttpRequest httpRequest = new HttpRequest(method,
uri,
uri.getPort(),
httpHeaders,
contentAsByteArray);
// Query-plan and address-refresh calls use their own configured timeouts.
Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds());
if (OperationType.QueryPlan.equals(request.getOperationType())) {
responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds());
} else if (request.isAddressRefresh()) {
responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds());
}
Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, responseTimeout);
return toDocumentServiceResponse(httpResponseMono, request);
} catch (Exception e) {
// Surface synchronous failures through the reactive error channel.
return Mono.error(e);
}
}
/**
 * Merges the client's default headers with the per-request headers. Request
 * headers win on conflict, and null header values are sent as empty strings.
 *
 * @param headers the per-request headers; may be null
 * @return the combined header set to put on the wire
 */
private HttpHeaders getHttpRequestHeaders(Map<String, String> headers) {
HttpHeaders httpHeaders = new HttpHeaders(this.defaultHeaders.size());
// BUGFIX: 'headers' was previously dereferenced (containsKey) before the
// null check below, which would have thrown NPE for a null map.
if (headers == null) {
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
return httpHeaders;
}
// Defaults first, but never shadow an explicitly supplied request header.
for (Entry<String, String> entry : this.defaultHeaders.entrySet()) {
if (!headers.containsKey(entry.getKey())) {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
for (Entry<String, String> entry : headers.entrySet()) {
// Preserve prior behavior: null values are sent as empty strings.
if (entry.getValue() == null) {
httpHeaders.set(entry.getKey(), "");
} else {
httpHeaders.set(entry.getKey(), entry.getValue());
}
}
return httpHeaders;
}
// Resolves the absolute HTTPS URI for the request: an explicit endpoint
// override wins; otherwise the global endpoint manager selects the endpoint
// (first write endpoint for media requests).
private URI getUri(RxDocumentServiceRequest request) throws URISyntaxException {
URI rootUri = request.getEndpointOverride();
if (rootUri == null) {
if (request.getIsMedia()) {
// Media requests are routed to a write region.
rootUri = this.globalEndpointManager.getWriteEndpoints().get(0);
} else {
rootUri = this.globalEndpointManager.resolveServiceEndpoint(request);
}
}
String path = PathsHelper.generatePath(request.getResourceType(), request, request.isFeed);
// Database-account requests target the service root.
if(request.getResourceType().equals(ResourceType.DatabaseAccount)) {
path = StringUtils.EMPTY;
}
return new URI("https",
null,
rootUri.getHost(),
rootUri.getPort(),
ensureSlashPrefixed(path),
null,
null);
}
// Returns the path guaranteed to start with a leading '/'; a null path is
// passed through unchanged.
private String ensureSlashPrefixed(String path) {
if (path == null) {
return null;
}
return path.startsWith("/") ? path : "/" + path;
}
/**
 * Transforms the reactor netty's client response Observable to RxDocumentServiceResponse Observable.
 *
 * Once the customer code subscribes to the observable returned by the CRUD APIs,
 * the subscription goes up till it reaches the source reactor netty's observable,
 * and at that point the HTTP invocation will be made.
 *
 * @param httpResponseMono the raw HTTP response
 * @param request the originating service request (carries the diagnostics context)
 * @return {@link Mono} emitting the wrapped service response, or a CosmosException on failure
 */
private Mono<RxDocumentServiceResponse> toDocumentServiceResponse(Mono<HttpResponse> httpResponseMono,
                                                                  RxDocumentServiceRequest request) {
    return httpResponseMono.flatMap(httpResponse -> {
        HttpHeaders httpResponseHeaders = httpResponse.headers();
        int httpResponseStatus = httpResponse.statusCode();
        // Normalize an empty body to an empty byte array so downstream code
        // never observes an empty Mono.
        Mono<byte[]> contentObservable = httpResponse
            .bodyAsByteArray()
            .switchIfEmpty(Mono.just(EMPTY_BYTE_ARRAY));
        return contentObservable
            .map(content -> {
                ReactorNettyRequestRecord reactorNettyRequestRecord = httpResponse.request().reactorNettyRequestRecord();
                if (reactorNettyRequestRecord != null) {
                    reactorNettyRequestRecord.setTimeCompleted(Instant.now());
                    BridgeInternal.setTransportClientRequestTimelineOnDiagnostics(request.requestContext.cosmosDiagnostics,
                        reactorNettyRequestRecord.takeTimelineSnapshot());
                }
                // Error status codes are turned into CosmosException before a store response is built.
                validateOrThrow(request, HttpResponseStatus.valueOf(httpResponseStatus), httpResponseHeaders, content);
                StoreResponse rsp = new StoreResponse(httpResponseStatus,
                    HttpUtils.unescape(httpResponseHeaders.toMap().entrySet()),
                    content);
                // BUGFIX: the request record can legitimately be null (see the guard
                // above); the original dereferenced it unconditionally here and could
                // NPE instead of delivering an otherwise valid response.
                if (reactorNettyRequestRecord != null) {
                    DirectBridgeInternal.setRequestTimeline(rsp, reactorNettyRequestRecord.takeTimelineSnapshot());
                }
                if (request.requestContext.cosmosDiagnostics != null) {
                    BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, rsp, null);
                    DirectBridgeInternal.setCosmosDiagnostics(rsp, request.requestContext.cosmosDiagnostics);
                }
                return rsp;
            })
            .single();
    }).map(rsp -> new RxDocumentServiceResponse(this.clientContext, rsp))
        .onErrorResume(throwable -> {
            // Unwrap reactive composite exceptions to get at the real cause.
            Throwable unwrappedException = reactor.core.Exceptions.unwrap(throwable);
            if (!(unwrappedException instanceof Exception)) {
                logger.error("Unexpected failure {}", unwrappedException.getMessage(), unwrappedException);
                return Mono.error(unwrappedException);
            }
            Exception exception = (Exception) unwrappedException;
            CosmosException dce;
            if (!(exception instanceof CosmosException)) {
                // Wrap transport-level failures so callers always observe CosmosException.
                logger.error("Network failure", exception);
                dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, 0, exception);
                BridgeInternal.setRequestHeaders(dce, request.getHeaders());
            } else {
                dce = (CosmosException) exception;
            }
            if (WebExceptionUtility.isNetworkFailure(dce)) {
                // Distinguish read timeouts from other connectivity failures so retry
                // policies can react differently.
                if (WebExceptionUtility.isReadTimeoutException(dce)) {
                    BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
                } else {
                    BridgeInternal.setSubStatusCode(dce, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_UNAVAILABLE);
                }
            }
            if (request.requestContext.cosmosDiagnostics != null) {
                BridgeInternal.recordGatewayResponse(request.requestContext.cosmosDiagnostics, request, null, dce);
                BridgeInternal.setCosmosDiagnostics(dce, request.requestContext.cosmosDiagnostics);
            }
            return Mono.error(dce);
        });
}
/**
 * Converts gateway error responses (status at or above the error threshold)
 * into a CosmosException carrying the parsed service error and headers.
 *
 * @throws CosmosException for error status codes; returns normally otherwise
 */
private void validateOrThrow(RxDocumentServiceRequest request,
                             HttpResponseStatus status,
                             HttpHeaders headers,
                             byte[] bodyAsBytes) {
    int statusCode = status.code();
    if (statusCode >= HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY) {
        String statusCodeString = status.reasonPhrase() != null
            ? status.reasonPhrase().replace(" ", "")
            : "";
        // BUGFIX: decode the error body with an explicit charset. The original
        // new String(byte[]) uses the platform default charset and can garble
        // non-ASCII error payloads on non-UTF-8 platforms.
        String body = bodyAsBytes != null
            ? new String(bodyAsBytes, java.nio.charset.StandardCharsets.UTF_8)
            : null;
        CosmosError cosmosError;
        cosmosError = (StringUtils.isNotEmpty(body)) ? new CosmosError(body) : new CosmosError();
        // Rebuild the error so the message always includes the status code.
        cosmosError = new CosmosError(statusCodeString,
            String.format("%s, StatusCode: %s", cosmosError.getMessage(), statusCodeString),
            cosmosError.getPartitionedQueryExecutionInfo());
        CosmosException dce = BridgeInternal.createCosmosException(request.requestContext.resourcePhysicalAddress, statusCode, cosmosError, headers.toMap());
        BridgeInternal.setRequestHeaders(dce, request.getHeaders());
        throw dce;
    }
}
// Dispatches the request to the HTTP handler matching its operation type.
// Batch shares the Create path; SqlQuery/Query/QueryPlan all go through query().
private Mono<RxDocumentServiceResponse> invokeAsyncInternal(RxDocumentServiceRequest request) {
switch (request.getOperationType()) {
case Create:
case Batch:
return this.create(request);
case Patch:
return this.patch(request);
case Upsert:
return this.upsert(request);
case Delete:
return this.delete(request);
case ExecuteJavaScript:
return this.execute(request);
case Read:
return this.read(request);
case ReadFeed:
return this.readFeed(request);
case Replace:
return this.replace(request);
case SqlQuery:
case Query:
case QueryPlan:
return this.query(request);
default:
// Fail fast on operation types this gateway store model cannot serve.
throw new IllegalStateException("Unknown operation setType " + request.getOperationType());
}
}
/**
 * Dispatches the request, retrying the whole invocation under the
 * web-exception backoff policy. single() asserts exactly one response.
 */
private Mono<RxDocumentServiceResponse> invokeAsync(RxDocumentServiceRequest request) {
    return BackoffRetryUtility.executeRetry(
        () -> invokeAsyncInternal(request).single(),
        new WebExceptionRetryPolicy());
}
@Override
public Mono<RxDocumentServiceResponse> processMessage(RxDocumentServiceRequest request) {
// Stamp the request with the current session token before sending it out.
this.applySessionToken(request);
Mono<RxDocumentServiceResponse> responseObs = invokeAsync(request);
// NOTE: onErrorResume is chained BEFORE map, so the error branch only sees
// failures from the invocation itself, while the success branch always
// captures the session token from the response headers.
return responseObs.onErrorResume(
e -> {
CosmosException dce = Utils.as(e, CosmosException.class);
if (dce == null) {
logger.error("unexpected failure {}", e.getMessage(), e);
return Mono.error(e);
}
// For non-master resources, certain failures (412, 409, and 404 that is
// not a read-session miss) still carry a usable session token; capture it
// before propagating the error.
if ((!ReplicatedResourceClientUtils.isMasterResource(request.getResourceType())) &&
(dce.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED ||
dce.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
(
dce.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND &&
!Exceptions.isSubStatusCode(dce,
HttpConstants.SubStatusCodes.READ_SESSION_NOT_AVAILABLE)))) {
this.captureSessionToken(request, dce.getResponseHeaders());
}
return Mono.error(dce);
}
).map(response ->
{
this.captureSessionToken(request, response.getResponseHeaders());
return response;
}
);
}
/**
 * Updates the session container from a response. Deleting a collection
 * invalidates all of its session tokens; every other response refreshes the
 * container from the response headers.
 */
private void captureSessionToken(RxDocumentServiceRequest request, Map<String, String> responseHeaders) {
    boolean isCollectionDelete = request.getResourceType() == ResourceType.DocumentCollection
        && request.getOperationType() == OperationType.Delete;
    if (!isCollectionDelete) {
        this.sessionContainer.setSessionToken(request, responseHeaders);
        return;
    }
    // For name-based requests the collection rid is only known from the response.
    String resourceId = request.getIsNameBased()
        ? responseHeaders.get(HttpConstants.HttpHeaders.OWNER_ID)
        : request.getResourceId();
    this.sessionContainer.clearTokenByResourceId(resourceId);
}
/** True when the operation is served by the master partition; same short-circuit order as the original disjunction. */
private static boolean isMasterOperation(ResourceType resourceType, OperationType operationType) {
    if (ReplicatedResourceClientUtils.isMasterResource(resourceType)) {
        return true;
    }
    if (isStoredProcedureMasterOperation(resourceType, operationType)) {
        return true;
    }
    return operationType == OperationType.QueryPlan;
}
/** CRUD on a stored-procedure definition is a master operation; executing the script itself is not. */
private static boolean isStoredProcedureMasterOperation(ResourceType resourceType, OperationType operationType) {
    if (resourceType != ResourceType.StoredProcedure) {
        return false;
    }
    return operationType != OperationType.ExecuteJavaScript;
}
} |
One advantage of being explicit is that the program will fail at pipeline creation time if the option is accidentally forgotten about. Otherwise they could get a NPE. But not sure if that protection is valuable enough, or if the simplification of the API is better? | public void processElement(ProcessContext context) {
try {
// Happy path: parse the JSON line into a Row and emit it on the main output.
context.output(jsonToRow(objectMapper(), context.element()));
} catch (Exception ex) {
// Dead-letter path: route unparsable lines to a side output instead of failing the bundle.
if (withExtendedErrorInfo) {
// Extended mode attaches the parser's message alongside the offending line.
context.output(
PARSE_ERROR_LINE_WITH_MSG,
Row.withSchema(ERROR_ROW_WITH_ERR_MSG_SCHEMA)
.addValue(context.element())
.addValue(ex.getMessage())
.build());
} else {
context.output(
PARSE_ERROR_LINE,
Row.withSchema(ERROR_ROW_SCHEMA).addValue(context.element()).build());
}
}
} | PARSE_ERROR_LINE_WITH_MSG, | public void processElement(ProcessContext context) {
// Parse the JSON line into a Row and emit it; parse failures propagate to the runner.
context.output(jsonToRow(objectMapper(), context.element()));
} | class JsonToRowFn extends PTransform<PCollection<String>, PCollection<Row>> {
// Lazily built JSON mapper; transient+volatile so each worker rebuilds it after deserialization.
private transient volatile @Nullable ObjectMapper objectMapper;
// Target row schema used both for parsing and for the output PCollection.
private Schema schema;
// Factory: validates up front that the schema only uses JSON-representable types.
static JsonToRowFn forSchema(Schema rowSchema) {
RowJson.verifySchemaSupported(rowSchema);
return new JsonToRowFn(rowSchema);
}
private JsonToRowFn(Schema schema) {
this.schema = schema;
}
@Override
public PCollection<Row> expand(PCollection<String> jsonStrings) {
// Parse each JSON string into a Row and attach the schema to the output collection.
return jsonStrings
.apply(
ParDo.of(
new DoFn<String, Row>() {
@ProcessElement
// NOTE(review): the @ProcessElement handler body is elided in this excerpt.
}))
.setRowSchema(schema);
}
// Double-checked locking so the mapper is created at most once per instance.
private ObjectMapper objectMapper() {
if (this.objectMapper == null) {
synchronized (this) {
if (this.objectMapper == null) {
this.objectMapper = newObjectMapperWith(RowJsonDeserializer.forSchema(this.schema));
}
}
}
return this.objectMapper;
}
} | class JsonToRowFn extends PTransform<PCollection<String>, PCollection<Row>> {
// Lazily built JSON mapper; transient+volatile so each worker rebuilds it after deserialization.
private transient volatile @Nullable ObjectMapper objectMapper;
// Target row schema used both for parsing and for the output PCollection.
private Schema schema;
// Factory: validates up front that the schema only uses JSON-representable types.
static JsonToRowFn forSchema(Schema rowSchema) {
RowJson.verifySchemaSupported(rowSchema);
return new JsonToRowFn(rowSchema);
}
private JsonToRowFn(Schema schema) {
this.schema = schema;
}
@Override
public PCollection<Row> expand(PCollection<String> jsonStrings) {
// Parse each JSON string into a Row and attach the schema to the output collection.
return jsonStrings
.apply(
ParDo.of(
new DoFn<String, Row>() {
@ProcessElement
// NOTE(review): the @ProcessElement handler body is elided in this excerpt.
}))
.setRowSchema(schema);
}
// Double-checked locking so the mapper is created at most once per instance.
private ObjectMapper objectMapper() {
if (this.objectMapper == null) {
synchronized (this) {
if (this.objectMapper == null) {
this.objectMapper = newObjectMapperWith(RowJsonDeserializer.forSchema(this.schema));
}
}
}
return this.objectMapper;
}
} |
Adding it but it would be good to understand that do heartbeat services need to do the same or they're somehow different? | private void stopResourceManagerServices() throws Exception {
Exception exception = null;
// Shut down services in reverse start order, collecting failures so every
// service still gets a chance to stop; the first failure is rethrown at the end.
try {
    terminate();
} catch (Exception e) {
    exception =
        new ResourceManagerException("Error while shutting down resource manager", e);
}
// BUGFIX/consistency: guard the token manager like every other service stop.
// A throwing stop() here must not prevent the slot manager and the
// job-leader-id service below from being closed.
try {
    delegationTokenManager.ifPresent(DelegationTokenManager::stop);
} catch (Exception e) {
    exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
stopHeartbeatServices();
try {
    slotManager.close();
} catch (Exception e) {
    exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
try {
    jobLeaderIdService.stop();
} catch (Exception e) {
    exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
resourceManagerMetricGroup.close();
clearStateInternal();
ExceptionUtils.tryRethrowException(exception);
} | delegationTokenManager.ifPresent(DelegationTokenManager::stop); | private void stopResourceManagerServices() throws Exception {
Exception exception = null;
// Stop services in reverse start order; failures are collected with
// firstOrSuppressed so every service still gets a chance to shut down,
// and the first failure is rethrown at the very end.
try {
terminate();
} catch (Exception e) {
exception =
new ResourceManagerException("Error while shutting down resource manager", e);
}
try {
delegationTokenManager.stop();
} catch (Exception e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
stopHeartbeatServices();
try {
slotManager.close();
} catch (Exception e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
try {
jobLeaderIdService.stop();
} catch (Exception e) {
exception = ExceptionUtils.firstOrSuppressed(e, exception);
}
resourceManagerMetricGroup.close();
clearStateInternal();
ExceptionUtils.tryRethrowException(exception);
} | class ResourceManager<WorkerType extends ResourceIDRetrievable>
extends FencedRpcEndpoint<ResourceManagerId> implements ResourceManagerGateway {
public static final String RESOURCE_MANAGER_NAME = "resourcemanager";
/** Unique id of the resource manager. */
private final ResourceID resourceId;
/** All currently registered JobMasterGateways scoped by JobID. */
private final Map<JobID, JobManagerRegistration> jobManagerRegistrations;
/** All currently registered JobMasterGateways scoped by ResourceID. */
private final Map<ResourceID, JobManagerRegistration> jmResourceIdRegistrations;
/** Service to retrieve the job leader ids. */
private final JobLeaderIdService jobLeaderIdService;
/** All currently registered TaskExecutors with their framework specific worker information. */
private final Map<ResourceID, WorkerRegistration<WorkerType>> taskExecutors;
/** Ongoing registration of TaskExecutors per resource ID. */
private final Map<ResourceID, CompletableFuture<TaskExecutorGateway>>
taskExecutorGatewayFutures;
/** Factory/config for the heartbeat managers below. */
private final HeartbeatServices heartbeatServices;
/** Fatal error handler. */
private final FatalErrorHandler fatalErrorHandler;
/** The slot manager maintains the available slots. */
private final SlotManager slotManager;
/** Tracks cluster partitions and releases them via the owning TaskExecutor. */
private final ResourceManagerPartitionTracker clusterPartitionTracker;
private final ClusterInformation clusterInformation;
protected final ResourceManagerMetricGroup resourceManagerMetricGroup;
protected final Executor ioExecutor;
/** Completed once the resource manager has fully started (see onStart). */
private final CompletableFuture<Void> startedFuture;
/** The heartbeat manager with task managers. */
private HeartbeatManager<TaskExecutorHeartbeatPayload, Void> taskManagerHeartbeatManager;
/** The heartbeat manager with job managers. */
private HeartbeatManager<Void, Void> jobManagerHeartbeatManager;
/** Kerberos delegation token manager; empty when token fetching is disabled. */
private final Optional<DelegationTokenManager> delegationTokenManager;
public ResourceManager(
Configuration configuration,
RpcService rpcService,
UUID leaderSessionId,
ResourceID resourceId,
HeartbeatServices heartbeatServices,
SlotManager slotManager,
ResourceManagerPartitionTrackerFactory clusterPartitionTrackerFactory,
JobLeaderIdService jobLeaderIdService,
ClusterInformation clusterInformation,
FatalErrorHandler fatalErrorHandler,
ResourceManagerMetricGroup resourceManagerMetricGroup,
Time rpcTimeout,
Executor ioExecutor) {
super(
rpcService,
RpcServiceUtils.createRandomName(RESOURCE_MANAGER_NAME),
ResourceManagerId.fromUuid(leaderSessionId));
this.resourceId = checkNotNull(resourceId);
this.heartbeatServices = checkNotNull(heartbeatServices);
this.slotManager = checkNotNull(slotManager);
this.jobLeaderIdService = checkNotNull(jobLeaderIdService);
this.clusterInformation = checkNotNull(clusterInformation);
this.fatalErrorHandler = checkNotNull(fatalErrorHandler);
this.resourceManagerMetricGroup = checkNotNull(resourceManagerMetricGroup);
this.jobManagerRegistrations = new HashMap<>(4);
this.jmResourceIdRegistrations = new HashMap<>(4);
this.taskExecutors = new HashMap<>(8);
this.taskExecutorGatewayFutures = new HashMap<>(8);
// Heartbeat managers start as no-ops; presumably replaced once heartbeat
// services are started (see startHeartbeatServices) — TODO confirm.
this.jobManagerHeartbeatManager = NoOpHeartbeatManager.getInstance();
this.taskManagerHeartbeatManager = NoOpHeartbeatManager.getInstance();
// The partition tracker releases partitions by asking the owning TaskExecutor's gateway.
this.clusterPartitionTracker =
checkNotNull(clusterPartitionTrackerFactory)
.get(
(taskExecutorResourceId, dataSetIds) ->
taskExecutors
.get(taskExecutorResourceId)
.getTaskExecutorGateway()
.releaseClusterPartitions(dataSetIds, rpcTimeout)
.exceptionally(
throwable -> {
log.debug(
"Request for release of cluster partitions belonging to data sets {} was not successful.",
dataSetIds,
throwable);
throw new CompletionException(
throwable);
}));
this.ioExecutor = ioExecutor;
this.startedFuture = new CompletableFuture<>();
checkNotNull(configuration, "Flink configuration must not be null");
// Only create the token manager when Kerberos token fetching is enabled and
// Hadoop common is on the classpath.
this.delegationTokenManager =
configuration.getBoolean(SecurityOptions.KERBEROS_FETCH_DELEGATION_TOKEN)
&& HadoopDependency.isHadoopCommonOnClasspath(
getClass().getClassLoader())
? Optional.of(new DelegationTokenManager(configuration))
: Optional.empty();
}
@Override
public final void onStart() throws Exception {
try {
log.info("Starting the resource manager.");
startResourceManagerServices();
// Signal readiness to everyone waiting on getStartedFuture().
startedFuture.complete(null);
} catch (Throwable t) {
final ResourceManagerException exception =
new ResourceManagerException(
String.format("Could not start the ResourceManager %s", getAddress()),
t);
onFatalError(exception);
throw exception;
}
}
private void startResourceManagerServices() throws Exception {
try {
jobLeaderIdService.start(new JobLeaderIdActionsImpl());
registerMetrics();
startHeartbeatServices();
slotManager.start(
getFencingToken(), getMainThreadExecutor(), new ResourceActionsImpl());
delegationTokenManager.ifPresent(DelegationTokenManager::start);
initialize();
} catch (Exception e) {
handleStartResourceManagerServicesException(e);
}
}
// Best-effort cleanup of partially started services; the original failure is
// rethrown with any cleanup failure attached as suppressed.
private void handleStartResourceManagerServicesException(Exception e) throws Exception {
try {
stopResourceManagerServices();
} catch (Exception inner) {
e.addSuppressed(inner);
}
throw e;
}
/**
* Completion of this future indicates that the resource manager is fully started and is ready
* to serve.
*/
public CompletableFuture<Void> getStartedFuture() {
return startedFuture;
}
@Override
public final CompletableFuture<Void> onStop() {
try {
stopResourceManagerServices();
} catch (Exception exception) {
return FutureUtils.completedExceptionally(
new FlinkException(
"Could not properly shut down the ResourceManager.", exception));
}
return CompletableFuture.completedFuture(null);
}
@Override
public CompletableFuture<RegistrationResponse> registerJobMaster(
final JobMasterId jobMasterId,
final ResourceID jobManagerResourceId,
final String jobManagerAddress,
final JobID jobId,
final Time timeout) {
checkNotNull(jobMasterId);
checkNotNull(jobManagerResourceId);
checkNotNull(jobManagerAddress);
checkNotNull(jobId);
// Make sure the job is tracked by the leader-id service before validating leadership.
if (!jobLeaderIdService.containsJob(jobId)) {
try {
jobLeaderIdService.addJob(jobId);
} catch (Exception e) {
ResourceManagerException exception =
new ResourceManagerException(
"Could not add the job " + jobId + " to the job id leader service.",
e);
onFatalError(exception);
log.error("Could not add job {} to job leader id service.", jobId, e);
return FutureUtils.completedExceptionally(exception);
}
}
log.info(
"Registering job manager {}@{} for job {}.", jobMasterId, jobManagerAddress, jobId);
CompletableFuture<JobMasterId> jobMasterIdFuture;
try {
jobMasterIdFuture = jobLeaderIdService.getLeaderId(jobId);
} catch (Exception e) {
ResourceManagerException exception =
new ResourceManagerException(
"Cannot obtain the "
+ "job leader id future to verify the correct job leader.",
e);
onFatalError(exception);
log.debug(
"Could not obtain the job leader id future to verify the correct job leader.");
return FutureUtils.completedExceptionally(exception);
}
// Connect to the JobMaster; once both the gateway and the current leader id
// are available, accept the registration only if the registering master is
// still the leader. Runs on the main thread executor to mutate state safely.
CompletableFuture<JobMasterGateway> jobMasterGatewayFuture =
getRpcService().connect(jobManagerAddress, jobMasterId, JobMasterGateway.class);
CompletableFuture<RegistrationResponse> registrationResponseFuture =
jobMasterGatewayFuture.thenCombineAsync(
jobMasterIdFuture,
(JobMasterGateway jobMasterGateway, JobMasterId leadingJobMasterId) -> {
if (Objects.equals(leadingJobMasterId, jobMasterId)) {
return registerJobMasterInternal(
jobMasterGateway,
jobId,
jobManagerAddress,
jobManagerResourceId);
} else {
final String declineMessage =
String.format(
"The leading JobMaster id %s did not match the received JobMaster id %s. "
+ "This indicates that a JobMaster leader change has happened.",
leadingJobMasterId, jobMasterId);
log.debug(declineMessage);
return new RegistrationResponse.Failure(
new FlinkException(declineMessage));
}
},
getMainThreadExecutor());
// Translate failures anywhere above into a registration failure response.
return registrationResponseFuture.handleAsync(
(RegistrationResponse registrationResponse, Throwable throwable) -> {
if (throwable != null) {
if (log.isDebugEnabled()) {
log.debug(
"Registration of job manager {}@{} failed.",
jobMasterId,
jobManagerAddress,
throwable);
} else {
log.info(
"Registration of job manager {}@{} failed.",
jobMasterId,
jobManagerAddress);
}
return new RegistrationResponse.Failure(throwable);
} else {
return registrationResponse;
}
},
ioExecutor);
}
@Override
public CompletableFuture<RegistrationResponse> registerTaskExecutor(
final TaskExecutorRegistration taskExecutorRegistration, final Time timeout) {
CompletableFuture<TaskExecutorGateway> taskExecutorGatewayFuture =
getRpcService()
.connect(
taskExecutorRegistration.getTaskExecutorAddress(),
TaskExecutorGateway.class);
// Remember the pending connection so that a newer registration attempt for
// the same resource id can supersede this one.
taskExecutorGatewayFutures.put(
taskExecutorRegistration.getResourceId(), taskExecutorGatewayFuture);
return taskExecutorGatewayFuture.handleAsync(
(TaskExecutorGateway taskExecutorGateway, Throwable throwable) -> {
final ResourceID resourceId = taskExecutorRegistration.getResourceId();
// Only the most recent connection attempt for this resource id may
// complete the registration; older ones are declined as outdated.
if (taskExecutorGatewayFuture == taskExecutorGatewayFutures.get(resourceId)) {
taskExecutorGatewayFutures.remove(resourceId);
if (throwable != null) {
return new RegistrationResponse.Failure(throwable);
} else {
return registerTaskExecutorInternal(
taskExecutorGateway, taskExecutorRegistration);
}
} else {
log.debug(
"Ignoring outdated TaskExecutorGateway connection for {}.",
resourceId.getStringWithMetadata());
return new RegistrationResponse.Failure(
new FlinkException("Decline outdated task executor registration."));
}
},
getMainThreadExecutor());
}
/**
 * Accepts the slot report of a registered TaskExecutor and forwards it to the
 * slot manager; the first accepted report triggers onWorkerRegistered.
 */
@Override
public CompletableFuture<Acknowledge> sendSlotReport(
        ResourceID taskManagerResourceId,
        InstanceID taskManagerRegistrationId,
        SlotReport slotReport,
        Time timeout) {
    final WorkerRegistration<WorkerType> workerTypeWorkerRegistration =
            taskExecutors.get(taskManagerResourceId);
    // BUGFIX: the TaskExecutor may be unknown (never registered, or already
    // unregistered) when the report arrives; the original dereferenced the
    // registration unconditionally and would NPE. Answer with a failure instead.
    if (workerTypeWorkerRegistration == null) {
        return FutureUtils.completedExceptionally(
                new ResourceManagerException(
                        String.format(
                                "Unknown TaskManager registration id %s.",
                                taskManagerRegistrationId)));
    }
    if (workerTypeWorkerRegistration.getInstanceID().equals(taskManagerRegistrationId)) {
        if (slotManager.registerTaskManager(
                workerTypeWorkerRegistration,
                slotReport,
                workerTypeWorkerRegistration.getTotalResourceProfile(),
                workerTypeWorkerRegistration.getDefaultSlotResourceProfile())) {
            onWorkerRegistered(workerTypeWorkerRegistration.getWorker());
        }
        return CompletableFuture.completedFuture(Acknowledge.get());
    } else {
        // Instance id mismatch: the report belongs to an older incarnation.
        return FutureUtils.completedExceptionally(
                new ResourceManagerException(
                        String.format(
                                "Unknown TaskManager registration id %s.",
                                taskManagerRegistrationId)));
    }
}
// Hook for subclasses; invoked once a worker's slot report has been accepted. No-op by default.
protected void onWorkerRegistered(WorkerType worker) {
}
// Forwards a TaskManager heartbeat (with its payload) to the heartbeat manager.
@Override
public CompletableFuture<Void> heartbeatFromTaskManager(
final ResourceID resourceID, final TaskExecutorHeartbeatPayload heartbeatPayload) {
return taskManagerHeartbeatManager.receiveHeartbeat(resourceID, heartbeatPayload);
}
// JobManager heartbeats carry no payload.
@Override
public CompletableFuture<Void> heartbeatFromJobManager(final ResourceID resourceID) {
return jobManagerHeartbeatManager.receiveHeartbeat(resourceID, null);
}
// Closes the TaskManager connection and, if a worker was registered, stops it too.
@Override
public void disconnectTaskManager(final ResourceID resourceId, final Exception cause) {
closeTaskManagerConnection(resourceId, cause).ifPresent(ResourceManager.this::stopWorker);
}
// A globally terminal job is removed outright; otherwise only the connection is
// closed and its declared resource requirements are retained.
@Override
public void disconnectJobManager(
final JobID jobId, JobStatus jobStatus, final Exception cause) {
if (jobStatus.isGloballyTerminalState()) {
removeJob(jobId, cause);
} else {
closeJobManagerConnection(jobId, ResourceRequirementHandling.RETAIN, cause);
}
}
@Override
public CompletableFuture<Acknowledge> declareRequiredResources(
JobMasterId jobMasterId, ResourceRequirements resourceRequirements, Time timeout) {
final JobID jobId = resourceRequirements.getJobId();
final JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.get(jobId);
if (null != jobManagerRegistration) {
// Fencing check: only the currently registered leader may declare requirements.
if (Objects.equals(jobMasterId, jobManagerRegistration.getJobMasterId())) {
slotManager.processResourceRequirements(resourceRequirements);
return CompletableFuture.completedFuture(Acknowledge.get());
} else {
return FutureUtils.completedExceptionally(
new ResourceManagerException(
"The job leader's id "
+ jobManagerRegistration.getJobMasterId()
+ " does not match the received id "
+ jobMasterId
+ '.'));
}
} else {
return FutureUtils.completedExceptionally(
new ResourceManagerException(
"Could not find registered job manager for job " + jobId + '.'));
}
}
/**
 * Frees a slot in the slot manager, but only when the message still refers to
 * the currently registered incarnation of the owning TaskExecutor.
 */
@Override
public void notifySlotAvailable(
        final InstanceID instanceID, final SlotID slotId, final AllocationID allocationId) {
    final ResourceID resourceId = slotId.getResourceID();
    final WorkerRegistration<WorkerType> registration = taskExecutors.get(resourceId);
    if (registration == null) {
        log.debug(
            "Could not find registration for resource id {}. Discarding the slot available"
                + "message {}.",
            resourceId.getStringWithMetadata(),
            slotId);
        return;
    }
    if (!Objects.equals(registration.getInstanceID(), instanceID)) {
        log.debug(
            "Invalid registration id for slot available message. This indicates an"
                + " outdated request.");
        return;
    }
    slotManager.freeSlot(slotId, allocationId);
}
/**
* Cleanup application and shut down cluster.
*
* @param finalStatus of the Flink application
* @param diagnostics diagnostics message for the Flink application or {@code null}
*/
@Override
public CompletableFuture<Acknowledge> deregisterApplication(
final ApplicationStatus finalStatus, @Nullable final String diagnostics) {
log.info(
"Shut down cluster because application is in {}, diagnostics {}.",
finalStatus,
diagnostics);
try {
internalDeregisterApplication(finalStatus, diagnostics);
} catch (ResourceManagerException e) {
// Deregistration is best-effort; shutdown continues and the caller is still acked.
log.warn("Could not properly shutdown the application.", e);
}
return CompletableFuture.completedFuture(Acknowledge.get());
}
// Returns the number of currently registered TaskManagers.
@Override
public CompletableFuture<Integer> getNumberOfRegisteredTaskManagers() {
return CompletableFuture.completedFuture(taskExecutors.size());
}
@Override
public CompletableFuture<Collection<TaskManagerInfo>> requestTaskManagerInfo(Time timeout) {
// Builds an info snapshot (heartbeat, slot counts, resources, hardware) per TaskExecutor.
final ArrayList<TaskManagerInfo> taskManagerInfos = new ArrayList<>(taskExecutors.size());
for (Map.Entry<ResourceID, WorkerRegistration<WorkerType>> taskExecutorEntry :
taskExecutors.entrySet()) {
final ResourceID resourceId = taskExecutorEntry.getKey();
final WorkerRegistration<WorkerType> taskExecutor = taskExecutorEntry.getValue();
taskManagerInfos.add(
new TaskManagerInfo(
resourceId,
taskExecutor.getTaskExecutorGateway().getAddress(),
taskExecutor.getDataPort(),
taskExecutor.getJmxPort(),
taskManagerHeartbeatManager.getLastHeartbeatFrom(resourceId),
slotManager.getNumberRegisteredSlotsOf(taskExecutor.getInstanceID()),
slotManager.getNumberFreeSlotsOf(taskExecutor.getInstanceID()),
slotManager.getRegisteredResourceOf(taskExecutor.getInstanceID()),
slotManager.getFreeResourceOf(taskExecutor.getInstanceID()),
taskExecutor.getHardwareDescription(),
taskExecutor.getMemoryConfiguration()));
}
return CompletableFuture.completedFuture(taskManagerInfos);
}
@Override
public CompletableFuture<TaskManagerInfoWithSlots> requestTaskManagerDetailsInfo(
ResourceID resourceId, Time timeout) {
final WorkerRegistration<WorkerType> taskExecutor = taskExecutors.get(resourceId);
if (taskExecutor == null) {
return FutureUtils.completedExceptionally(new UnknownTaskExecutorException(resourceId));
} else {
final InstanceID instanceId = taskExecutor.getInstanceID();
// Info snapshot for a single TaskExecutor, plus its allocated-slot details.
final TaskManagerInfoWithSlots taskManagerInfoWithSlots =
new TaskManagerInfoWithSlots(
new TaskManagerInfo(
resourceId,
taskExecutor.getTaskExecutorGateway().getAddress(),
taskExecutor.getDataPort(),
taskExecutor.getJmxPort(),
taskManagerHeartbeatManager.getLastHeartbeatFrom(resourceId),
slotManager.getNumberRegisteredSlotsOf(instanceId),
slotManager.getNumberFreeSlotsOf(instanceId),
slotManager.getRegisteredResourceOf(instanceId),
slotManager.getFreeResourceOf(instanceId),
taskExecutor.getHardwareDescription(),
taskExecutor.getMemoryConfiguration()),
slotManager.getAllocatedSlotsOf(instanceId));
return CompletableFuture.completedFuture(taskManagerInfoWithSlots);
}
}
/** Snapshot of cluster capacity: TaskManager count, slot counts, and resources. */
@Override
public CompletableFuture<ResourceOverview> requestResourceOverview(Time timeout) {
    final int registeredSlots = slotManager.getNumberRegisteredSlots();
    final int availableSlots = slotManager.getNumberFreeSlots();
    final ResourceProfile registeredResource = slotManager.getRegisteredResource();
    final ResourceProfile availableResource = slotManager.getFreeResource();
    final ResourceOverview overview =
        new ResourceOverview(
            taskExecutors.size(),
            registeredSlots,
            availableSlots,
            registeredResource,
            availableResource);
    return CompletableFuture.completedFuture(overview);
}
@Override
public CompletableFuture<Collection<Tuple2<ResourceID, String>>>
requestTaskManagerMetricQueryServiceAddresses(Time timeout) {
// Ask every registered TaskExecutor for its metric query service address;
// executors without one are filtered out of the combined result.
final ArrayList<CompletableFuture<Optional<Tuple2<ResourceID, String>>>>
metricQueryServiceAddressFutures = new ArrayList<>(taskExecutors.size());
for (Map.Entry<ResourceID, WorkerRegistration<WorkerType>> workerRegistrationEntry :
taskExecutors.entrySet()) {
final ResourceID tmResourceId = workerRegistrationEntry.getKey();
final WorkerRegistration<WorkerType> workerRegistration =
workerRegistrationEntry.getValue();
final TaskExecutorGateway taskExecutorGateway =
workerRegistration.getTaskExecutorGateway();
final CompletableFuture<Optional<Tuple2<ResourceID, String>>>
metricQueryServiceAddressFuture =
taskExecutorGateway
.requestMetricQueryServiceAddress(timeout)
.thenApply(
o ->
o.toOptional()
.map(
address ->
Tuple2.of(
tmResourceId,
address)));
metricQueryServiceAddressFutures.add(metricQueryServiceAddressFuture);
}
return FutureUtils.combineAll(metricQueryServiceAddressFutures)
.thenApply(
collection ->
collection.stream()
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toList()));
}
/** Requests upload of the given well-known file type from a registered TaskExecutor. */
@Override
public CompletableFuture<TransientBlobKey> requestTaskManagerFileUploadByType(
        ResourceID taskManagerId, FileType fileType, Time timeout) {
    log.debug(
        "Request {} file upload from TaskExecutor {}.",
        fileType,
        taskManagerId.getStringWithMetadata());
    final WorkerRegistration<WorkerType> registration = taskExecutors.get(taskManagerId);
    if (registration == null) {
        log.debug(
            "Request upload of file {} from unregistered TaskExecutor {}.",
            fileType,
            taskManagerId.getStringWithMetadata());
        return FutureUtils.completedExceptionally(
            new UnknownTaskExecutorException(taskManagerId));
    }
    return registration.getTaskExecutorGateway().requestFileUploadByType(fileType, timeout);
}

/** Requests upload of the file with the given name from a registered TaskExecutor. */
@Override
public CompletableFuture<TransientBlobKey> requestTaskManagerFileUploadByName(
        ResourceID taskManagerId, String fileName, Time timeout) {
    log.debug(
        "Request upload of file {} from TaskExecutor {}.",
        fileName,
        taskManagerId.getStringWithMetadata());
    final WorkerRegistration<WorkerType> registration = taskExecutors.get(taskManagerId);
    if (registration == null) {
        log.debug(
            "Request upload of file {} from unregistered TaskExecutor {}.",
            fileName,
            taskManagerId.getStringWithMetadata());
        return FutureUtils.completedExceptionally(
            new UnknownTaskExecutorException(taskManagerId));
    }
    return registration.getTaskExecutorGateway().requestFileUploadByName(fileName, timeout);
}
/** Returns the list of log files available on the given TaskExecutor. */
@Override
public CompletableFuture<Collection<LogInfo>> requestTaskManagerLogList(
        ResourceID taskManagerId, Time timeout) {
    final WorkerRegistration<WorkerType> registration = taskExecutors.get(taskManagerId);
    if (registration == null) {
        log.debug(
            "Requested log list from unregistered TaskExecutor {}.",
            taskManagerId.getStringWithMetadata());
        return FutureUtils.completedExceptionally(
            new UnknownTaskExecutorException(taskManagerId));
    }
    return registration.getTaskExecutorGateway().requestLogList(timeout);
}

/** Releases all cluster partitions belonging to the given data set. */
@Override
public CompletableFuture<Void> releaseClusterPartitions(IntermediateDataSetID dataSetId) {
    return clusterPartitionTracker.releaseClusterPartitions(dataSetId);
}

/** Lists the cluster data sets currently known to the partition tracker. */
@Override
public CompletableFuture<Map<IntermediateDataSetID, DataSetMetaInfo>> listDataSets() {
    return CompletableFuture.completedFuture(clusterPartitionTracker.listDataSets());
}
/** Requests a thread dump from a registered TaskExecutor. */
@Override
public CompletableFuture<ThreadDumpInfo> requestThreadDump(
        ResourceID taskManagerId, Time timeout) {
    final WorkerRegistration<WorkerType> registration = taskExecutors.get(taskManagerId);
    if (registration == null) {
        log.debug(
            "Requested thread dump from unregistered TaskExecutor {}.",
            taskManagerId.getStringWithMetadata());
        return FutureUtils.completedExceptionally(
            new UnknownTaskExecutorException(taskManagerId));
    }
    return registration.getTaskExecutorGateway().requestThreadDump(timeout);
}

/** Hands out the gateway used for sampling thread info on a registered TaskExecutor. */
@Override
public CompletableFuture<TaskExecutorThreadInfoGateway> requestTaskExecutorThreadInfoGateway(
        ResourceID taskManagerId, Time timeout) {
    final WorkerRegistration<WorkerType> registration = taskExecutors.get(taskManagerId);
    if (registration == null) {
        return FutureUtils.completedExceptionally(
            new UnknownTaskExecutorException(taskManagerId));
    }
    return CompletableFuture.completedFuture(registration.getTaskExecutorGateway());
}
/**
* Registers a new JobMaster.
*
* @param jobMasterGateway to communicate with the registering JobMaster
* @param jobId of the job for which the JobMaster is responsible
* @param jobManagerAddress address of the JobMaster
* @param jobManagerResourceId ResourceID of the JobMaster
* @return RegistrationResponse
*/
private RegistrationResponse registerJobMasterInternal(
final JobMasterGateway jobMasterGateway,
JobID jobId,
String jobManagerAddress,
ResourceID jobManagerResourceId) {
if (jobManagerRegistrations.containsKey(jobId)) {
JobManagerRegistration oldJobManagerRegistration = jobManagerRegistrations.get(jobId);
if (Objects.equals(
oldJobManagerRegistration.getJobMasterId(),
jobMasterGateway.getFencingToken())) {
// Same leader re-registering: keep the existing registration.
log.debug(
"Job manager {}@{} was already registered.",
jobMasterGateway.getFencingToken(),
jobManagerAddress);
} else {
// A different leader took over: close the old connection, then register the new one.
closeJobManagerConnection(
oldJobManagerRegistration.getJobID(),
ResourceRequirementHandling.RETAIN,
new Exception("New job leader for job " + jobId + " found."));
JobManagerRegistration jobManagerRegistration =
new JobManagerRegistration(jobId, jobManagerResourceId, jobMasterGateway);
jobManagerRegistrations.put(jobId, jobManagerRegistration);
jmResourceIdRegistrations.put(jobManagerResourceId, jobManagerRegistration);
}
} else {
JobManagerRegistration jobManagerRegistration =
new JobManagerRegistration(jobId, jobManagerResourceId, jobMasterGateway);
jobManagerRegistrations.put(jobId, jobManagerRegistration);
jmResourceIdRegistrations.put(jobManagerResourceId, jobManagerRegistration);
}
log.info(
"Registered job manager {}@{} for job {}.",
jobMasterGateway.getFencingToken(),
jobManagerAddress,
jobId);
// Start monitoring the JobMaster via heartbeats.
jobManagerHeartbeatManager.monitorTarget(
jobManagerResourceId, new JobMasterHeartbeatSender(jobMasterGateway));
return new JobMasterRegistrationSuccess(getFencingToken(), resourceId);
}
/**
 * Registers a new TaskExecutor.
 *
 * <p>Any previous registration for the same resource id is replaced: the old instance is
 * first unregistered from the slot manager, then the framework is asked whether it
 * recognizes the worker before the new registration is stored.
 *
 * @param taskExecutorGateway gateway to talk to the registering TaskExecutor
 * @param taskExecutorRegistration task executor registration parameters
 * @return RegistrationResponse — success with the new instance id, or a rejection if the
 *     framework did not recognize the worker
 */
private RegistrationResponse registerTaskExecutorInternal(
        TaskExecutorGateway taskExecutorGateway,
        TaskExecutorRegistration taskExecutorRegistration) {
    ResourceID taskExecutorResourceId = taskExecutorRegistration.getResourceId();
    WorkerRegistration<WorkerType> oldRegistration =
            taskExecutors.remove(taskExecutorResourceId);
    if (oldRegistration != null) {
        log.debug(
                "Replacing old registration of TaskExecutor {}.",
                taskExecutorResourceId.getStringWithMetadata());
        // The old registration's slots are no longer valid; drop them before re-registering.
        slotManager.unregisterTaskManager(
                oldRegistration.getInstanceID(),
                new ResourceManagerException(
                        String.format(
                                "TaskExecutor %s re-connected to the ResourceManager.",
                                taskExecutorResourceId.getStringWithMetadata())));
    }
    // Ask the framework-specific implementation whether it recognizes this worker.
    final WorkerType newWorker = workerStarted(taskExecutorResourceId);
    String taskExecutorAddress = taskExecutorRegistration.getTaskExecutorAddress();
    if (newWorker == null) {
        log.warn(
                "Discard registration from TaskExecutor {} at ({}) because the framework did "
                        + "not recognize it",
                taskExecutorResourceId.getStringWithMetadata(),
                taskExecutorAddress);
        return new TaskExecutorRegistrationRejection(
                "The ResourceManager does not recognize this TaskExecutor.");
    } else {
        WorkerRegistration<WorkerType> registration =
                new WorkerRegistration<>(
                        taskExecutorGateway,
                        newWorker,
                        taskExecutorRegistration.getDataPort(),
                        taskExecutorRegistration.getJmxPort(),
                        taskExecutorRegistration.getHardwareDescription(),
                        taskExecutorRegistration.getMemoryConfiguration(),
                        taskExecutorRegistration.getTotalResourceProfile(),
                        taskExecutorRegistration.getDefaultSlotResourceProfile());
        log.info(
                "Registering TaskManager with ResourceID {} ({}) at ResourceManager",
                taskExecutorResourceId.getStringWithMetadata(),
                taskExecutorAddress);
        taskExecutors.put(taskExecutorResourceId, registration);
        // Start heartbeating with the newly registered TaskExecutor.
        taskManagerHeartbeatManager.monitorTarget(
                taskExecutorResourceId, new TaskExecutorHeartbeatSender(taskExecutorGateway));
        return new TaskExecutorRegistrationSuccess(
                registration.getInstanceID(), resourceId, clusterInformation);
    }
}
/** Registers resource manager metrics, currently the number of registered TaskManagers. */
protected void registerMetrics() {
    resourceManagerMetricGroup.gauge(
            MetricNames.NUM_REGISTERED_TASK_MANAGERS, () -> (long) taskExecutors.size());
}
/** Clears all registration bookkeeping kept by this resource manager. */
private void clearStateInternal() {
    jobManagerRegistrations.clear();
    jmResourceIdRegistrations.clear();
    taskExecutors.clear();
    try {
        jobLeaderIdService.clear();
    } catch (Exception e) {
        // A failure here leaves inconsistent leader-id state behind; treat it as fatal.
        onFatalError(
                new ResourceManagerException(
                        "Could not properly clear the job leader id service.", e));
    }
}
/**
 * This method should be called by the framework once it detects that a currently registered job
 * manager has failed.
 *
 * @param jobId identifying the job whose leader shall be disconnected.
 * @param resourceRequirementHandling indicating how existing resource requirements for the
 *     corresponding job should be handled
 * @param cause The exception which cause the JobManager failed.
 */
protected void closeJobManagerConnection(
        JobID jobId, ResourceRequirementHandling resourceRequirementHandling, Exception cause) {
    JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.remove(jobId);
    if (jobManagerRegistration != null) {
        final ResourceID jobManagerResourceId =
                jobManagerRegistration.getJobManagerResourceID();
        final JobMasterGateway jobMasterGateway = jobManagerRegistration.getJobManagerGateway();
        final JobMasterId jobMasterId = jobManagerRegistration.getJobMasterId();
        log.info(
                "Disconnect job manager {}@{} for job {} from the resource manager.",
                jobMasterId,
                jobMasterGateway.getAddress(),
                jobId);
        // Stop heartbeating and drop both registration indices before notifying the JM,
        // so a concurrent re-registration cannot observe stale state.
        jobManagerHeartbeatManager.unmonitorTarget(jobManagerResourceId);
        jmResourceIdRegistrations.remove(jobManagerResourceId);
        if (resourceRequirementHandling == ResourceRequirementHandling.CLEAR) {
            slotManager.clearResourceRequirements(jobId);
        }
        // Tell the job master that this resource manager dropped the connection.
        jobMasterGateway.disconnectResourceManager(getFencingToken(), cause);
    } else {
        log.debug("There was no registered job manager for job {}.", jobId);
    }
}
/**
 * This method should be called by the framework once it detects that a currently registered
 * task executor has failed.
 *
 * @param resourceID Id of the TaskManager that has failed.
 * @param cause The exception which cause the TaskManager failed.
 * @return The {@link WorkerType} of the closed connection, or empty if already removed.
 */
protected Optional<WorkerType> closeTaskManagerConnection(
        final ResourceID resourceID, final Exception cause) {
    // Always stop heartbeating, even if the registration was already removed.
    taskManagerHeartbeatManager.unmonitorTarget(resourceID);
    WorkerRegistration<WorkerType> workerRegistration = taskExecutors.remove(resourceID);
    if (workerRegistration != null) {
        log.info(
                "Closing TaskExecutor connection {} because: {}",
                resourceID.getStringWithMetadata(),
                cause.getMessage());
        // Free the worker's slots, release its cluster partitions, then notify it.
        slotManager.unregisterTaskManager(workerRegistration.getInstanceID(), cause);
        clusterPartitionTracker.processTaskExecutorShutdown(resourceID);
        workerRegistration.getTaskExecutorGateway().disconnectResourceManager(cause);
    } else {
        log.debug(
                "No open TaskExecutor connection {}. Ignoring close TaskExecutor connection. Closing reason was: {}",
                resourceID.getStringWithMetadata(),
                cause.getMessage());
    }
    return Optional.ofNullable(workerRegistration).map(WorkerRegistration::getWorker);
}
/** Removes the job from the leader-id service and closes any remaining JM connection. */
protected void removeJob(JobID jobId, Exception cause) {
    try {
        jobLeaderIdService.removeJob(jobId);
    } catch (Exception removalFailure) {
        // Best effort: a failed removal only degrades bookkeeping, so just warn.
        log.warn(
                "Could not properly remove the job {} from the job leader id service.",
                jobId,
                removalFailure);
    }
    final boolean stillRegistered = jobManagerRegistrations.containsKey(jobId);
    if (stillRegistered) {
        // The job is gone for good: drop its resource requirements as well.
        closeJobManagerConnection(jobId, ResourceRequirementHandling.CLEAR, cause);
    }
}
/** Reacts to a job leader losing leadership; only acts if the lost leader is current. */
protected void jobLeaderLostLeadership(JobID jobId, JobMasterId oldJobMasterId) {
    final JobManagerRegistration registration = jobManagerRegistrations.get(jobId);
    if (registration == null) {
        log.debug(
                "Discard job leader lost leadership for outdated leader {} for job {}.",
                oldJobMasterId,
                jobId);
    } else if (Objects.equals(registration.getJobMasterId(), oldJobMasterId)) {
        // The registered leader really lost leadership: disconnect it, but keep the
        // job's resource requirements for the upcoming new leader.
        closeJobManagerConnection(
                jobId,
                ResourceRequirementHandling.RETAIN,
                new Exception("Job leader lost leadership."));
    } else {
        log.debug(
                "Discarding job leader lost leadership, because a new job leader was found for job {}. ",
                jobId);
    }
}
/** Releases the worker backing the given instance id, if one is known. */
protected void releaseResource(InstanceID instanceId, Exception cause) {
    // Linear scan: the registrations are keyed by ResourceID, not InstanceID.
    WorkerType worker = null;
    for (WorkerRegistration<WorkerType> registration : taskExecutors.values()) {
        if (registration.getInstanceID().equals(instanceId)) {
            worker = registration.getWorker();
            break;
        }
    }
    if (worker == null) {
        // No framework worker known; just drop the instance from the slot manager.
        slotManager.unregisterTaskManager(instanceId, cause);
        return;
    }
    if (stopWorker(worker)) {
        closeTaskManagerConnection(worker.getResourceID(), cause);
    } else {
        log.debug(
                "Worker {} could not be stopped.",
                worker.getResourceID().getStringWithMetadata());
    }
}
/** How a disconnecting job's declared resource requirements should be handled. */
private enum ResourceRequirementHandling {
    // Keep the requirements, e.g. when a new leader for the same job is expected.
    RETAIN,
    // Drop the requirements, e.g. when the job reached a globally terminal state.
    CLEAR
}
/**
 * Notifies the ResourceManager that a fatal error has occurred and it cannot proceed.
 *
 * @param t The exception describing the fatal error
 */
protected void onFatalError(Throwable t) {
    // Logging is best effort only: nothing may prevent the handler from being invoked.
    try {
        log.error("Fatal error occurred in ResourceManager.", t);
    } catch (Throwable ignored) {
        // deliberately swallowed — see comment above
    }
    fatalErrorHandler.onFatalError(t);
}
/** Replaces the initial no-op heartbeat managers with real sender-side managers. */
private void startHeartbeatServices() {
    taskManagerHeartbeatManager =
            heartbeatServices.createHeartbeatManagerSender(
                    resourceId,
                    new TaskManagerHeartbeatListener(),
                    getMainThreadExecutor(),
                    log);
    jobManagerHeartbeatManager =
            heartbeatServices.createHeartbeatManagerSender(
                    resourceId,
                    new JobManagerHeartbeatListener(),
                    getMainThreadExecutor(),
                    log);
}

/** Stops both heartbeat managers; safe to call on the initial no-op managers as well. */
private void stopHeartbeatServices() {
    taskManagerHeartbeatManager.stop();
    jobManagerHeartbeatManager.stop();
}
/**
 * Initializes the framework specific components.
 *
 * @throws ResourceManagerException which occurs during initialization and causes the resource
 *     manager to fail.
 */
protected abstract void initialize() throws ResourceManagerException;

/**
 * Terminates the framework specific components.
 *
 * @throws Exception which occurs during termination.
 */
protected abstract void terminate() throws Exception;

/**
 * The framework specific code to deregister the application. This should report the
 * application's final status and shut down the resource manager cleanly.
 *
 * <p>This method also needs to make sure all pending containers that are not registered yet are
 * returned.
 *
 * @param finalStatus The application status to report.
 * @param optionalDiagnostics A diagnostics message or {@code null}.
 * @throws ResourceManagerException if the application could not be shut down.
 */
protected abstract void internalDeregisterApplication(
        ApplicationStatus finalStatus, @Nullable String optionalDiagnostics)
        throws ResourceManagerException;

/**
 * Allocates a resource using the worker resource specification.
 *
 * @param workerResourceSpec workerResourceSpec specifies the size of the to be allocated
 *     resource
 * @return whether the resource can be allocated
 */
@VisibleForTesting
public abstract boolean startNewWorker(WorkerResourceSpec workerResourceSpec);

/**
 * Callback when a worker was started.
 *
 * @param resourceID The worker resource id
 * @return the framework specific worker, or {@code null} if the framework does not recognize
 *     the resource id, in which case the registration is rejected
 */
protected abstract WorkerType workerStarted(ResourceID resourceID);

/**
 * Stops the given worker.
 *
 * @param worker The worker.
 * @return True if the worker was stopped, otherwise false
 */
public abstract boolean stopWorker(WorkerType worker);
/**
 * Set {@link SlotManager} whether to fail unfulfillable slot requests.
 *
 * @param failUnfulfillableRequest whether to fail unfulfillable requests
 */
protected void setFailUnfulfillableRequest(boolean failUnfulfillableRequest) {
    slotManager.setFailUnfulfillableRequest(failUnfulfillableRequest);
}

/** Heartbeat sender that pings a JobMaster; no payload is attached. */
private static final class JobMasterHeartbeatSender extends HeartbeatSender<Void> {
    private final JobMasterGateway jobMasterGateway;

    private JobMasterHeartbeatSender(JobMasterGateway jobMasterGateway) {
        this.jobMasterGateway = jobMasterGateway;
    }

    @Override
    public CompletableFuture<Void> requestHeartbeat(ResourceID resourceID, Void payload) {
        return jobMasterGateway.heartbeatFromResourceManager(resourceID);
    }
}

/** Heartbeat sender that pings a TaskExecutor; no payload is attached. */
private static final class TaskExecutorHeartbeatSender extends HeartbeatSender<Void> {
    private final TaskExecutorGateway taskExecutorGateway;

    private TaskExecutorHeartbeatSender(TaskExecutorGateway taskExecutorGateway) {
        this.taskExecutorGateway = taskExecutorGateway;
    }

    @Override
    public CompletableFuture<Void> requestHeartbeat(ResourceID resourceID, Void payload) {
        return taskExecutorGateway.heartbeatFromResourceManager(resourceID);
    }
}
/** Callbacks from the {@link SlotManager}; all must run on the RPC main thread. */
private class ResourceActionsImpl implements ResourceActions {
    @Override
    public void releaseResource(InstanceID instanceId, Exception cause) {
        validateRunsInMainThread();
        ResourceManager.this.releaseResource(instanceId, cause);
    }

    @Override
    public boolean allocateResource(WorkerResourceSpec workerResourceSpec) {
        validateRunsInMainThread();
        return startNewWorker(workerResourceSpec);
    }

    @Override
    public void notifyAllocationFailure(
            JobID jobId, AllocationID allocationId, Exception cause) {
        validateRunsInMainThread();
        // Forward the failure to the job's leader, if one is currently registered.
        JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.get(jobId);
        if (jobManagerRegistration != null) {
            jobManagerRegistration
                    .getJobManagerGateway()
                    .notifyAllocationFailure(allocationId, cause);
        }
    }

    @Override
    public void notifyNotEnoughResourcesAvailable(
            JobID jobId, Collection<ResourceRequirement> acquiredResources) {
        validateRunsInMainThread();
        // Forward the resource shortage to the job's leader, if one is registered.
        JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.get(jobId);
        if (jobManagerRegistration != null) {
            jobManagerRegistration
                    .getJobManagerGateway()
                    .notifyNotEnoughResourcesAvailable(acquiredResources);
        }
    }
}
/**
 * Callbacks from the {@link JobLeaderIdService}, dispatched onto the RPC main thread.
 *
 * <p>Improvements: the anonymous {@code Runnable} classes were replaced by lambdas (the file
 * already uses lambdas extensively), and the missing space in the removal exception message
 * ("…{}was removed…") was fixed.
 */
private class JobLeaderIdActionsImpl implements JobLeaderIdActions {
    @Override
    public void jobLeaderLostLeadership(final JobID jobId, final JobMasterId oldJobMasterId) {
        // Hop onto the main thread before touching ResourceManager state.
        runAsync(() -> ResourceManager.this.jobLeaderLostLeadership(jobId, oldJobMasterId));
    }

    @Override
    public void notifyJobTimeout(final JobID jobId, final UUID timeoutId) {
        runAsync(
                () -> {
                    // Only act if this timeout is still the current one for the job.
                    if (jobLeaderIdService.isValidTimeout(jobId, timeoutId)) {
                        removeJob(
                                jobId,
                                new Exception(
                                        "Job " + jobId + " was removed because of timeout"));
                    }
                });
    }

    @Override
    public void handleError(Throwable error) {
        onFatalError(error);
    }
}
/** Handles heartbeat events for registered TaskExecutors. */
private class TaskManagerHeartbeatListener
        implements HeartbeatListener<TaskExecutorHeartbeatPayload, Void> {
    @Override
    public void notifyHeartbeatTimeout(final ResourceID resourceID) {
        final String message =
                String.format(
                        "The heartbeat of TaskManager with id %s timed out.",
                        resourceID.getStringWithMetadata());
        log.info(message);
        handleTaskManagerConnectionLoss(resourceID, new TimeoutException(message));
    }

    private void handleTaskManagerConnectionLoss(ResourceID resourceID, Exception cause) {
        validateRunsInMainThread();
        // Close the connection and, if a framework worker is known, stop it too.
        closeTaskManagerConnection(resourceID, cause)
                .ifPresent(ResourceManager.this::stopWorker);
    }

    @Override
    public void notifyTargetUnreachable(ResourceID resourceID) {
        final String message =
                String.format(
                        "TaskManager with id %s is no longer reachable.",
                        resourceID.getStringWithMetadata());
        log.info(message);
        handleTaskManagerConnectionLoss(resourceID, new ResourceManagerException(message));
    }

    @Override
    public void reportPayload(
            final ResourceID resourceID, final TaskExecutorHeartbeatPayload payload) {
        validateRunsInMainThread();
        final WorkerRegistration<WorkerType> workerRegistration = taskExecutors.get(resourceID);
        if (workerRegistration == null) {
            // Heartbeat raced with an unregistration; nothing to update.
            log.debug(
                    "Received slot report from TaskManager {} which is no longer registered.",
                    resourceID.getStringWithMetadata());
        } else {
            // Forward slot and cluster-partition state carried by the heartbeat.
            InstanceID instanceId = workerRegistration.getInstanceID();
            slotManager.reportSlotStatus(instanceId, payload.getSlotReport());
            clusterPartitionTracker.processTaskExecutorClusterPartitionReport(
                    resourceID, payload.getClusterPartitionReport());
        }
    }

    @Override
    public Void retrievePayload(ResourceID resourceID) {
        // Heartbeats sent to TaskExecutors carry no payload from this side.
        return null;
    }
}
/**
 * Handles heartbeat events for registered JobManagers.
 *
 * <p>Improvement: {@code handleJobManagerConnectionLoss} performed a redundant
 * {@code containsKey} check followed by {@code get} plus another null check; a single
 * {@code get} with one null check is equivalent and avoids the double lookup.
 */
private class JobManagerHeartbeatListener implements HeartbeatListener<Void, Void> {
    @Override
    public void notifyHeartbeatTimeout(final ResourceID resourceID) {
        final String message =
                String.format(
                        "The heartbeat of JobManager with id %s timed out.",
                        resourceID.getStringWithMetadata());
        log.info(message);
        handleJobManagerConnectionLoss(resourceID, new TimeoutException(message));
    }

    private void handleJobManagerConnectionLoss(ResourceID resourceID, Exception cause) {
        validateRunsInMainThread();
        // A missing registration means the job manager was already disconnected.
        final JobManagerRegistration jobManagerRegistration =
                jmResourceIdRegistrations.get(resourceID);
        if (jobManagerRegistration != null) {
            closeJobManagerConnection(
                    jobManagerRegistration.getJobID(),
                    ResourceRequirementHandling.RETAIN,
                    cause);
        }
    }

    @Override
    public void notifyTargetUnreachable(ResourceID resourceID) {
        final String message =
                String.format(
                        "JobManager with id %s is no longer reachable.",
                        resourceID.getStringWithMetadata());
        log.info(message);
        handleJobManagerConnectionLoss(resourceID, new ResourceManagerException(message));
    }

    @Override
    public void reportPayload(ResourceID resourceID, Void payload) {
        // JobManager heartbeats carry no payload.
    }

    @Override
    public Void retrievePayload(ResourceID resourceID) {
        return null;
    }
}
/** Returns the total number of TaskManagers required across all worker specs. */
protected int getNumberRequiredTaskManagers() {
    return getRequiredResources().values().stream().mapToInt(Integer::intValue).sum();
}

/** Returns the required resources per worker resource spec, as reported by the slot manager. */
protected Map<WorkerResourceSpec, Integer> getRequiredResources() {
    return slotManager.getRequiredResources();
}
}

class ResourceManager<WorkerType extends ResourceIDRetrievable>
extends FencedRpcEndpoint<ResourceManagerId> implements ResourceManagerGateway {
public static final String RESOURCE_MANAGER_NAME = "resourcemanager";

/** Unique id of the resource manager. */
private final ResourceID resourceId;

/** All currently registered JobMasterGateways scoped by JobID. */
private final Map<JobID, JobManagerRegistration> jobManagerRegistrations;

/** All currently registered JobMasterGateways scoped by ResourceID. */
private final Map<ResourceID, JobManagerRegistration> jmResourceIdRegistrations;

/** Service to retrieve the job leader ids. */
private final JobLeaderIdService jobLeaderIdService;

/** All currently registered TaskExecutors with their framework specific worker information. */
private final Map<ResourceID, WorkerRegistration<WorkerType>> taskExecutors;

/** Ongoing registration of TaskExecutors per resource ID. */
private final Map<ResourceID, CompletableFuture<TaskExecutorGateway>>
        taskExecutorGatewayFutures;

/** Factory for the job- and task-manager heartbeat managers. */
private final HeartbeatServices heartbeatServices;

/** Fatal error handler. */
private final FatalErrorHandler fatalErrorHandler;

/** The slot manager maintains the available slots. */
private final SlotManager slotManager;

/** Tracks cluster partitions hosted on registered TaskExecutors. */
private final ResourceManagerPartitionTracker clusterPartitionTracker;

private final ClusterInformation clusterInformation;

protected final ResourceManagerMetricGroup resourceManagerMetricGroup;

/** Executor for blocking operations, e.g. registration response handling. */
protected final Executor ioExecutor;

/** Completed once the resource manager services have been fully started. */
private final CompletableFuture<Void> startedFuture;

/** The heartbeat manager with task managers. */
private HeartbeatManager<TaskExecutorHeartbeatPayload, Void> taskManagerHeartbeatManager;

/** The heartbeat manager with job managers. */
private HeartbeatManager<Void, Void> jobManagerHeartbeatManager;

/** Manages delegation tokens; started together with the other services. */
private final DelegationTokenManager delegationTokenManager;
/**
 * Creates a ResourceManager endpoint; collaborators are only wired up here and started later
 * from {@code onStart()}.
 *
 * @param rpcService rpc service this endpoint runs on
 * @param leaderSessionId leader session id, used as the fencing token
 * @param resourceId unique id of this resource manager
 * @param heartbeatServices factory for the heartbeat managers
 * @param delegationTokenManager manager for delegation tokens
 * @param slotManager maintains the available slots
 * @param clusterPartitionTrackerFactory factory for the cluster partition tracker
 * @param jobLeaderIdService service to retrieve job leader ids
 * @param clusterInformation information about the cluster
 * @param fatalErrorHandler handler for fatal errors
 * @param resourceManagerMetricGroup metric group of this resource manager
 * @param rpcTimeout timeout used for partition-release RPCs to TaskExecutors
 * @param ioExecutor executor for blocking operations
 */
public ResourceManager(
        RpcService rpcService,
        UUID leaderSessionId,
        ResourceID resourceId,
        HeartbeatServices heartbeatServices,
        DelegationTokenManager delegationTokenManager,
        SlotManager slotManager,
        ResourceManagerPartitionTrackerFactory clusterPartitionTrackerFactory,
        JobLeaderIdService jobLeaderIdService,
        ClusterInformation clusterInformation,
        FatalErrorHandler fatalErrorHandler,
        ResourceManagerMetricGroup resourceManagerMetricGroup,
        Time rpcTimeout,
        Executor ioExecutor) {
    super(
            rpcService,
            RpcServiceUtils.createRandomName(RESOURCE_MANAGER_NAME),
            ResourceManagerId.fromUuid(leaderSessionId));
    this.resourceId = checkNotNull(resourceId);
    this.heartbeatServices = checkNotNull(heartbeatServices);
    this.slotManager = checkNotNull(slotManager);
    this.jobLeaderIdService = checkNotNull(jobLeaderIdService);
    this.clusterInformation = checkNotNull(clusterInformation);
    this.fatalErrorHandler = checkNotNull(fatalErrorHandler);
    this.resourceManagerMetricGroup = checkNotNull(resourceManagerMetricGroup);
    this.jobManagerRegistrations = new HashMap<>(4);
    this.jmResourceIdRegistrations = new HashMap<>(4);
    this.taskExecutors = new HashMap<>(8);
    this.taskExecutorGatewayFutures = new HashMap<>(8);
    // Real heartbeat managers are installed later by startHeartbeatServices().
    this.jobManagerHeartbeatManager = NoOpHeartbeatManager.getInstance();
    this.taskManagerHeartbeatManager = NoOpHeartbeatManager.getInstance();
    this.clusterPartitionTracker =
            checkNotNull(clusterPartitionTrackerFactory)
                    .get(
                            // Release function: forwards the release to the hosting
                            // TaskExecutor and logs failures at debug level.
                            (taskExecutorResourceId, dataSetIds) ->
                                    taskExecutors
                                            .get(taskExecutorResourceId)
                                            .getTaskExecutorGateway()
                                            .releaseClusterPartitions(dataSetIds, rpcTimeout)
                                            .exceptionally(
                                                    throwable -> {
                                                        log.debug(
                                                                "Request for release of cluster partitions belonging to data sets {} was not successful.",
                                                                dataSetIds,
                                                                throwable);
                                                        throw new CompletionException(
                                                                throwable);
                                                    }));
    this.ioExecutor = ioExecutor;
    this.startedFuture = new CompletableFuture<>();
    this.delegationTokenManager = delegationTokenManager;
}
@Override
public final void onStart() throws Exception {
    try {
        log.info("Starting the resource manager.");
        startResourceManagerServices();
        // Signal readiness to anyone waiting on getStartedFuture().
        startedFuture.complete(null);
    } catch (Throwable t) {
        final ResourceManagerException exception =
                new ResourceManagerException(
                        String.format("Could not start the ResourceManager %s", getAddress()),
                        t);
        // Startup failures are fatal; report them before rethrowing to the framework.
        onFatalError(exception);
        throw exception;
    }
}
/** Starts all collaborating services; on failure, already-started services are stopped. */
private void startResourceManagerServices() throws Exception {
    try {
        // Start order: leader id service, metrics, heartbeats, slot manager,
        // delegation tokens, and finally the framework-specific initialization.
        jobLeaderIdService.start(new JobLeaderIdActionsImpl());
        registerMetrics();
        startHeartbeatServices();
        slotManager.start(
                getFencingToken(), getMainThreadExecutor(), new ResourceActionsImpl());
        delegationTokenManager.start();
        initialize();
    } catch (Exception e) {
        handleStartResourceManagerServicesException(e);
    }
}
/** Best-effort cleanup after a failed start; secondary failures are attached as suppressed. */
private void handleStartResourceManagerServicesException(Exception e) throws Exception {
    try {
        stopResourceManagerServices();
    } catch (Exception inner) {
        e.addSuppressed(inner);
    }
    // Always rethrow the original startup failure.
    throw e;
}

/**
 * Completion of this future indicates that the resource manager is fully started and is ready
 * to serve.
 */
public CompletableFuture<Void> getStartedFuture() {
    return startedFuture;
}
@Override
public final CompletableFuture<Void> onStop() {
    try {
        // Shutdown is synchronous; any failure is wrapped into the returned future.
        stopResourceManagerServices();
    } catch (Exception exception) {
        return FutureUtils.completedExceptionally(
                new FlinkException(
                        "Could not properly shut down the ResourceManager.", exception));
    }
    return CompletableFuture.completedFuture(null);
}
/**
 * Registers a JobMaster: verifies the caller is the current job leader, connects to it, and
 * stores the registration. All gateway/leader-id futures are combined on the main thread.
 */
@Override
public CompletableFuture<RegistrationResponse> registerJobMaster(
        final JobMasterId jobMasterId,
        final ResourceID jobManagerResourceId,
        final String jobManagerAddress,
        final JobID jobId,
        final Time timeout) {
    checkNotNull(jobMasterId);
    checkNotNull(jobManagerResourceId);
    checkNotNull(jobManagerAddress);
    checkNotNull(jobId);
    // Make the job known to the leader id service before asking for its leader.
    if (!jobLeaderIdService.containsJob(jobId)) {
        try {
            jobLeaderIdService.addJob(jobId);
        } catch (Exception e) {
            ResourceManagerException exception =
                    new ResourceManagerException(
                            "Could not add the job " + jobId + " to the job id leader service.",
                            e);
            onFatalError(exception);
            log.error("Could not add job {} to job leader id service.", jobId, e);
            return FutureUtils.completedExceptionally(exception);
        }
    }
    log.info(
            "Registering job manager {}@{} for job {}.", jobMasterId, jobManagerAddress, jobId);
    CompletableFuture<JobMasterId> jobMasterIdFuture;
    try {
        jobMasterIdFuture = jobLeaderIdService.getLeaderId(jobId);
    } catch (Exception e) {
        ResourceManagerException exception =
                new ResourceManagerException(
                        "Cannot obtain the "
                                + "job leader id future to verify the correct job leader.",
                        e);
        onFatalError(exception);
        log.debug(
                "Could not obtain the job leader id future to verify the correct job leader.");
        return FutureUtils.completedExceptionally(exception);
    }
    CompletableFuture<JobMasterGateway> jobMasterGatewayFuture =
            getRpcService().connect(jobManagerAddress, jobMasterId, JobMasterGateway.class);
    // Register only if the connecting JobMaster is the current leader for the job.
    CompletableFuture<RegistrationResponse> registrationResponseFuture =
            jobMasterGatewayFuture.thenCombineAsync(
                    jobMasterIdFuture,
                    (JobMasterGateway jobMasterGateway, JobMasterId leadingJobMasterId) -> {
                        if (Objects.equals(leadingJobMasterId, jobMasterId)) {
                            return registerJobMasterInternal(
                                    jobMasterGateway,
                                    jobId,
                                    jobManagerAddress,
                                    jobManagerResourceId);
                        } else {
                            final String declineMessage =
                                    String.format(
                                            "The leading JobMaster id %s did not match the received JobMaster id %s. "
                                                    + "This indicates that a JobMaster leader change has happened.",
                                            leadingJobMasterId, jobMasterId);
                            log.debug(declineMessage);
                            return new RegistrationResponse.Failure(
                                    new FlinkException(declineMessage));
                        }
                    },
                    getMainThreadExecutor());
    // Map any failure into a RegistrationResponse.Failure off the main thread.
    return registrationResponseFuture.handleAsync(
            (RegistrationResponse registrationResponse, Throwable throwable) -> {
                if (throwable != null) {
                    if (log.isDebugEnabled()) {
                        log.debug(
                                "Registration of job manager {}@{} failed.",
                                jobMasterId,
                                jobManagerAddress,
                                throwable);
                    } else {
                        log.info(
                                "Registration of job manager {}@{} failed.",
                                jobMasterId,
                                jobManagerAddress);
                    }
                    return new RegistrationResponse.Failure(throwable);
                } else {
                    return registrationResponse;
                }
            },
            ioExecutor);
}
/**
 * Registers a TaskExecutor: connects to its gateway and, if this is still the most recent
 * connection attempt for the resource id, completes the registration on the main thread.
 */
@Override
public CompletableFuture<RegistrationResponse> registerTaskExecutor(
        final TaskExecutorRegistration taskExecutorRegistration, final Time timeout) {
    CompletableFuture<TaskExecutorGateway> taskExecutorGatewayFuture =
            getRpcService()
                    .connect(
                            taskExecutorRegistration.getTaskExecutorAddress(),
                            TaskExecutorGateway.class);
    // Remember the in-flight connection so a newer attempt for the same id supersedes it.
    taskExecutorGatewayFutures.put(
            taskExecutorRegistration.getResourceId(), taskExecutorGatewayFuture);
    return taskExecutorGatewayFuture.handleAsync(
            (TaskExecutorGateway taskExecutorGateway, Throwable throwable) -> {
                final ResourceID resourceId = taskExecutorRegistration.getResourceId();
                // Only proceed if this is still the latest connection attempt.
                if (taskExecutorGatewayFuture == taskExecutorGatewayFutures.get(resourceId)) {
                    taskExecutorGatewayFutures.remove(resourceId);
                    if (throwable != null) {
                        return new RegistrationResponse.Failure(throwable);
                    } else {
                        return registerTaskExecutorInternal(
                                taskExecutorGateway, taskExecutorRegistration);
                    }
                } else {
                    log.debug(
                            "Ignoring outdated TaskExecutorGateway connection for {}.",
                            resourceId.getStringWithMetadata());
                    return new RegistrationResponse.Failure(
                            new FlinkException("Decline outdated task executor registration."));
                }
            },
            getMainThreadExecutor());
}
/**
 * Processes a slot report from a TaskExecutor and registers it with the slot manager.
 *
 * <p>Fix: the registration lookup could return {@code null} for a TaskExecutor that is not
 * (or no longer) registered, which caused a NullPointerException on the subsequent
 * {@code getInstanceID()} call. An unknown TaskExecutor now fails the future with
 * {@link UnknownTaskExecutorException} instead.
 */
@Override
public CompletableFuture<Acknowledge> sendSlotReport(
        ResourceID taskManagerResourceId,
        InstanceID taskManagerRegistrationId,
        SlotReport slotReport,
        Time timeout) {
    final WorkerRegistration<WorkerType> workerTypeWorkerRegistration =
            taskExecutors.get(taskManagerResourceId);
    if (workerTypeWorkerRegistration == null) {
        return FutureUtils.completedExceptionally(
                new UnknownTaskExecutorException(taskManagerResourceId));
    }
    if (workerTypeWorkerRegistration.getInstanceID().equals(taskManagerRegistrationId)) {
        // First successful registration of the worker triggers the subclass hook.
        if (slotManager.registerTaskManager(
                workerTypeWorkerRegistration,
                slotReport,
                workerTypeWorkerRegistration.getTotalResourceProfile(),
                workerTypeWorkerRegistration.getDefaultSlotResourceProfile())) {
            onWorkerRegistered(workerTypeWorkerRegistration.getWorker());
        }
        return CompletableFuture.completedFuture(Acknowledge.get());
    } else {
        return FutureUtils.completedExceptionally(
                new ResourceManagerException(
                        String.format(
                                "Unknown TaskManager registration id %s.",
                                taskManagerRegistrationId)));
    }
}
/** Hook for subclasses, invoked when the slot manager registered the worker (see sendSlotReport). */
protected void onWorkerRegistered(WorkerType worker) {
    // noop by default
}

@Override
public CompletableFuture<Void> heartbeatFromTaskManager(
        final ResourceID resourceID, final TaskExecutorHeartbeatPayload heartbeatPayload) {
    return taskManagerHeartbeatManager.receiveHeartbeat(resourceID, heartbeatPayload);
}

@Override
public CompletableFuture<Void> heartbeatFromJobManager(final ResourceID resourceID) {
    return jobManagerHeartbeatManager.receiveHeartbeat(resourceID, null);
}

@Override
public void disconnectTaskManager(final ResourceID resourceId, final Exception cause) {
    // Drop the connection and, if a framework worker is known, stop it as well.
    closeTaskManagerConnection(resourceId, cause).ifPresent(ResourceManager.this::stopWorker);
}
/** Disconnects a JobManager; terminal jobs are removed entirely. */
@Override
public void disconnectJobManager(
        final JobID jobId, JobStatus jobStatus, final Exception cause) {
    final boolean jobIsDone = jobStatus.isGloballyTerminalState();
    if (jobIsDone) {
        // The job is finished for good: drop it from all bookkeeping.
        removeJob(jobId, cause);
    } else {
        // Keep the resource requirements for a potential new job leader.
        closeJobManagerConnection(jobId, ResourceRequirementHandling.RETAIN, cause);
    }
}
/** Accepts a job's resource requirements after validating the declaring leader. */
@Override
public CompletableFuture<Acknowledge> declareRequiredResources(
        JobMasterId jobMasterId, ResourceRequirements resourceRequirements, Time timeout) {
    final JobID jobId = resourceRequirements.getJobId();
    final JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.get(jobId);
    // Guard clauses: unknown job, then stale leader id.
    if (null == jobManagerRegistration) {
        return FutureUtils.completedExceptionally(
                new ResourceManagerException(
                        "Could not find registered job manager for job " + jobId + '.'));
    }
    if (!Objects.equals(jobMasterId, jobManagerRegistration.getJobMasterId())) {
        return FutureUtils.completedExceptionally(
                new ResourceManagerException(
                        "The job leader's id "
                                + jobManagerRegistration.getJobMasterId()
                                + " does not match the received id "
                                + jobMasterId
                                + '.'));
    }
    slotManager.processResourceRequirements(resourceRequirements);
    return CompletableFuture.completedFuture(Acknowledge.get());
}
/**
 * Frees the given slot if the notification matches the current registration.
 *
 * <p>Fix: the "could not find registration" debug message was missing a space between the
 * concatenated literals, producing "…slot availablemessage {}." in the logs.
 */
@Override
public void notifySlotAvailable(
        final InstanceID instanceID, final SlotID slotId, final AllocationID allocationId) {
    final ResourceID resourceId = slotId.getResourceID();
    WorkerRegistration<WorkerType> registration = taskExecutors.get(resourceId);
    if (registration != null) {
        InstanceID registrationId = registration.getInstanceID();
        // Only free the slot when the instance id still matches; otherwise the
        // notification refers to an older registration of the same TaskExecutor.
        if (Objects.equals(registrationId, instanceID)) {
            slotManager.freeSlot(slotId, allocationId);
        } else {
            log.debug(
                    "Invalid registration id for slot available message. This indicates an"
                            + " outdated request.");
        }
    } else {
        log.debug(
                "Could not find registration for resource id {}. Discarding the slot available"
                        + " message {}.",
                resourceId.getStringWithMetadata(),
                slotId);
    }
}
/**
 * Cleanup application and shut down cluster.
 *
 * @param finalStatus of the Flink application
 * @param diagnostics diagnostics message for the Flink application or {@code null}
 */
@Override
public CompletableFuture<Acknowledge> deregisterApplication(
        final ApplicationStatus finalStatus, @Nullable final String diagnostics) {
    log.info(
            "Shut down cluster because application is in {}, diagnostics {}.",
            finalStatus,
            diagnostics);
    try {
        internalDeregisterApplication(finalStatus, diagnostics);
    } catch (ResourceManagerException e) {
        // Deregistration is best effort; the shutdown request is always acknowledged.
        log.warn("Could not properly shutdown the application.", e);
    }
    return CompletableFuture.completedFuture(Acknowledge.get());
}

@Override
public CompletableFuture<Integer> getNumberOfRegisteredTaskManagers() {
    return CompletableFuture.completedFuture(taskExecutors.size());
}
/** Builds a {@link TaskManagerInfo} for every currently registered TaskExecutor. */
@Override
public CompletableFuture<Collection<TaskManagerInfo>> requestTaskManagerInfo(Time timeout) {
    final ArrayList<TaskManagerInfo> taskManagerInfos = new ArrayList<>(taskExecutors.size());
    for (Map.Entry<ResourceID, WorkerRegistration<WorkerType>> taskExecutorEntry :
            taskExecutors.entrySet()) {
        final ResourceID resourceId = taskExecutorEntry.getKey();
        final WorkerRegistration<WorkerType> taskExecutor = taskExecutorEntry.getValue();
        // Combine registration data with live slot/heartbeat state.
        taskManagerInfos.add(
                new TaskManagerInfo(
                        resourceId,
                        taskExecutor.getTaskExecutorGateway().getAddress(),
                        taskExecutor.getDataPort(),
                        taskExecutor.getJmxPort(),
                        taskManagerHeartbeatManager.getLastHeartbeatFrom(resourceId),
                        slotManager.getNumberRegisteredSlotsOf(taskExecutor.getInstanceID()),
                        slotManager.getNumberFreeSlotsOf(taskExecutor.getInstanceID()),
                        slotManager.getRegisteredResourceOf(taskExecutor.getInstanceID()),
                        slotManager.getFreeResourceOf(taskExecutor.getInstanceID()),
                        taskExecutor.getHardwareDescription(),
                        taskExecutor.getMemoryConfiguration()));
    }
    return CompletableFuture.completedFuture(taskManagerInfos);
}

/** Like requestTaskManagerInfo, but for a single TaskExecutor including its slot details. */
@Override
public CompletableFuture<TaskManagerInfoWithSlots> requestTaskManagerDetailsInfo(
        ResourceID resourceId, Time timeout) {
    final WorkerRegistration<WorkerType> taskExecutor = taskExecutors.get(resourceId);
    if (taskExecutor == null) {
        return FutureUtils.completedExceptionally(new UnknownTaskExecutorException(resourceId));
    } else {
        final InstanceID instanceId = taskExecutor.getInstanceID();
        final TaskManagerInfoWithSlots taskManagerInfoWithSlots =
                new TaskManagerInfoWithSlots(
                        new TaskManagerInfo(
                                resourceId,
                                taskExecutor.getTaskExecutorGateway().getAddress(),
                                taskExecutor.getDataPort(),
                                taskExecutor.getJmxPort(),
                                taskManagerHeartbeatManager.getLastHeartbeatFrom(resourceId),
                                slotManager.getNumberRegisteredSlotsOf(instanceId),
                                slotManager.getNumberFreeSlotsOf(instanceId),
                                slotManager.getRegisteredResourceOf(instanceId),
                                slotManager.getFreeResourceOf(instanceId),
                                taskExecutor.getHardwareDescription(),
                                taskExecutor.getMemoryConfiguration()),
                        slotManager.getAllocatedSlotsOf(instanceId));
        return CompletableFuture.completedFuture(taskManagerInfoWithSlots);
    }
}
@Override
public CompletableFuture<ResourceOverview> requestResourceOverview(Time timeout) {
final int numberSlots = slotManager.getNumberRegisteredSlots();
final int numberFreeSlots = slotManager.getNumberFreeSlots();
final ResourceProfile totalResource = slotManager.getRegisteredResource();
final ResourceProfile freeResource = slotManager.getFreeResource();
return CompletableFuture.completedFuture(
new ResourceOverview(
taskExecutors.size(),
numberSlots,
numberFreeSlots,
totalResource,
freeResource));
}
@Override
public CompletableFuture<Collection<Tuple2<ResourceID, String>>>
requestTaskManagerMetricQueryServiceAddresses(Time timeout) {
final ArrayList<CompletableFuture<Optional<Tuple2<ResourceID, String>>>>
metricQueryServiceAddressFutures = new ArrayList<>(taskExecutors.size());
for (Map.Entry<ResourceID, WorkerRegistration<WorkerType>> workerRegistrationEntry :
taskExecutors.entrySet()) {
final ResourceID tmResourceId = workerRegistrationEntry.getKey();
final WorkerRegistration<WorkerType> workerRegistration =
workerRegistrationEntry.getValue();
final TaskExecutorGateway taskExecutorGateway =
workerRegistration.getTaskExecutorGateway();
final CompletableFuture<Optional<Tuple2<ResourceID, String>>>
metricQueryServiceAddressFuture =
taskExecutorGateway
.requestMetricQueryServiceAddress(timeout)
.thenApply(
o ->
o.toOptional()
.map(
address ->
Tuple2.of(
tmResourceId,
address)));
metricQueryServiceAddressFutures.add(metricQueryServiceAddressFuture);
}
return FutureUtils.combineAll(metricQueryServiceAddressFutures)
.thenApply(
collection ->
collection.stream()
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toList()));
}
@Override
public CompletableFuture<TransientBlobKey> requestTaskManagerFileUploadByType(
ResourceID taskManagerId, FileType fileType, Time timeout) {
log.debug(
"Request {} file upload from TaskExecutor {}.",
fileType,
taskManagerId.getStringWithMetadata());
final WorkerRegistration<WorkerType> taskExecutor = taskExecutors.get(taskManagerId);
if (taskExecutor == null) {
log.debug(
"Request upload of file {} from unregistered TaskExecutor {}.",
fileType,
taskManagerId.getStringWithMetadata());
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
} else {
return taskExecutor.getTaskExecutorGateway().requestFileUploadByType(fileType, timeout);
}
}
@Override
public CompletableFuture<TransientBlobKey> requestTaskManagerFileUploadByName(
ResourceID taskManagerId, String fileName, Time timeout) {
log.debug(
"Request upload of file {} from TaskExecutor {}.",
fileName,
taskManagerId.getStringWithMetadata());
final WorkerRegistration<WorkerType> taskExecutor = taskExecutors.get(taskManagerId);
if (taskExecutor == null) {
log.debug(
"Request upload of file {} from unregistered TaskExecutor {}.",
fileName,
taskManagerId.getStringWithMetadata());
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
} else {
return taskExecutor.getTaskExecutorGateway().requestFileUploadByName(fileName, timeout);
}
}
@Override
public CompletableFuture<Collection<LogInfo>> requestTaskManagerLogList(
ResourceID taskManagerId, Time timeout) {
final WorkerRegistration<WorkerType> taskExecutor = taskExecutors.get(taskManagerId);
if (taskExecutor == null) {
log.debug(
"Requested log list from unregistered TaskExecutor {}.",
taskManagerId.getStringWithMetadata());
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
} else {
return taskExecutor.getTaskExecutorGateway().requestLogList(timeout);
}
}
    /** Releases all cluster partitions of the given data set via the cluster partition tracker. */
    @Override
    public CompletableFuture<Void> releaseClusterPartitions(IntermediateDataSetID dataSetId) {
        return clusterPartitionTracker.releaseClusterPartitions(dataSetId);
    }
    /** Returns meta info for all data sets currently known to the cluster partition tracker. */
    @Override
    public CompletableFuture<Map<IntermediateDataSetID, DataSetMetaInfo>> listDataSets() {
        return CompletableFuture.completedFuture(clusterPartitionTracker.listDataSets());
    }
@Override
public CompletableFuture<ThreadDumpInfo> requestThreadDump(
ResourceID taskManagerId, Time timeout) {
final WorkerRegistration<WorkerType> taskExecutor = taskExecutors.get(taskManagerId);
if (taskExecutor == null) {
log.debug(
"Requested thread dump from unregistered TaskExecutor {}.",
taskManagerId.getStringWithMetadata());
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
} else {
return taskExecutor.getTaskExecutorGateway().requestThreadDump(timeout);
}
}
@Override
public CompletableFuture<TaskExecutorThreadInfoGateway> requestTaskExecutorThreadInfoGateway(
ResourceID taskManagerId, Time timeout) {
final WorkerRegistration<WorkerType> taskExecutor = taskExecutors.get(taskManagerId);
if (taskExecutor == null) {
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
} else {
return CompletableFuture.completedFuture(taskExecutor.getTaskExecutorGateway());
}
}
    /**
     * Registers a new JobMaster.
     *
     * @param jobMasterGateway to communicate with the registering JobMaster
     * @param jobId of the job for which the JobMaster is responsible
     * @param jobManagerAddress address of the JobMaster
     * @param jobManagerResourceId ResourceID of the JobMaster
     * @return RegistrationResponse
     */
    private RegistrationResponse registerJobMasterInternal(
            final JobMasterGateway jobMasterGateway,
            JobID jobId,
            String jobManagerAddress,
            ResourceID jobManagerResourceId) {
        if (jobManagerRegistrations.containsKey(jobId)) {
            JobManagerRegistration oldJobManagerRegistration = jobManagerRegistrations.get(jobId);
            if (Objects.equals(
                    oldJobManagerRegistration.getJobMasterId(),
                    jobMasterGateway.getFencingToken())) {
                // same fencing token => the same leader re-registered; keep the existing entry
                log.debug(
                        "Job manager {}@{} was already registered.",
                        jobMasterGateway.getFencingToken(),
                        jobManagerAddress);
            } else {
                // a different leader took over: tear down the old connection first (retaining the
                // job's resource requirements), then record the new registration
                closeJobManagerConnection(
                        oldJobManagerRegistration.getJobID(),
                        ResourceRequirementHandling.RETAIN,
                        new Exception("New job leader for job " + jobId + " found."));
                JobManagerRegistration jobManagerRegistration =
                        new JobManagerRegistration(jobId, jobManagerResourceId, jobMasterGateway);
                jobManagerRegistrations.put(jobId, jobManagerRegistration);
                jmResourceIdRegistrations.put(jobManagerResourceId, jobManagerRegistration);
            }
        } else {
            // first registration for this job
            JobManagerRegistration jobManagerRegistration =
                    new JobManagerRegistration(jobId, jobManagerResourceId, jobMasterGateway);
            jobManagerRegistrations.put(jobId, jobManagerRegistration);
            jmResourceIdRegistrations.put(jobManagerResourceId, jobManagerRegistration);
        }
        log.info(
                "Registered job manager {}@{} for job {}.",
                jobMasterGateway.getFencingToken(),
                jobManagerAddress,
                jobId);
        // start heartbeating with the (possibly new) JobMaster
        jobManagerHeartbeatManager.monitorTarget(
                jobManagerResourceId, new JobMasterHeartbeatSender(jobMasterGateway));
        return new JobMasterRegistrationSuccess(getFencingToken(), resourceId);
    }
    /**
     * Registers a new TaskExecutor.
     *
     * @param taskExecutorGateway gateway to communicate with the registering TaskExecutor
     * @param taskExecutorRegistration task executor registration parameters
     * @return RegistrationResponse
     */
    private RegistrationResponse registerTaskExecutorInternal(
            TaskExecutorGateway taskExecutorGateway,
            TaskExecutorRegistration taskExecutorRegistration) {
        ResourceID taskExecutorResourceId = taskExecutorRegistration.getResourceId();
        // remove first so a re-connecting TaskExecutor replaces its stale registration
        WorkerRegistration<WorkerType> oldRegistration =
                taskExecutors.remove(taskExecutorResourceId);
        if (oldRegistration != null) {
            log.debug(
                    "Replacing old registration of TaskExecutor {}.",
                    taskExecutorResourceId.getStringWithMetadata());
            // free the slots still tracked for the old registration
            slotManager.unregisterTaskManager(
                    oldRegistration.getInstanceID(),
                    new ResourceManagerException(
                            String.format(
                                    "TaskExecutor %s re-connected to the ResourceManager.",
                                    taskExecutorResourceId.getStringWithMetadata())));
        }
        // ask the framework-specific implementation whether it knows this resource id;
        // null means the registration is rejected
        final WorkerType newWorker = workerStarted(taskExecutorResourceId);
        String taskExecutorAddress = taskExecutorRegistration.getTaskExecutorAddress();
        if (newWorker == null) {
            log.warn(
                    "Discard registration from TaskExecutor {} at ({}) because the framework did "
                            + "not recognize it",
                    taskExecutorResourceId.getStringWithMetadata(),
                    taskExecutorAddress);
            return new TaskExecutorRegistrationRejection(
                    "The ResourceManager does not recognize this TaskExecutor.");
        } else {
            WorkerRegistration<WorkerType> registration =
                    new WorkerRegistration<>(
                            taskExecutorGateway,
                            newWorker,
                            taskExecutorRegistration.getDataPort(),
                            taskExecutorRegistration.getJmxPort(),
                            taskExecutorRegistration.getHardwareDescription(),
                            taskExecutorRegistration.getMemoryConfiguration(),
                            taskExecutorRegistration.getTotalResourceProfile(),
                            taskExecutorRegistration.getDefaultSlotResourceProfile());
            log.info(
                    "Registering TaskManager with ResourceID {} ({}) at ResourceManager",
                    taskExecutorResourceId.getStringWithMetadata(),
                    taskExecutorAddress);
            taskExecutors.put(taskExecutorResourceId, registration);
            // start heartbeating with the newly registered TaskExecutor
            taskManagerHeartbeatManager.monitorTarget(
                    taskExecutorResourceId, new TaskExecutorHeartbeatSender(taskExecutorGateway));
            return new TaskExecutorRegistrationSuccess(
                    registration.getInstanceID(), resourceId, clusterInformation);
        }
    }
    /** Registers ResourceManager metrics; currently a gauge for the registered TaskManager count. */
    protected void registerMetrics() {
        resourceManagerMetricGroup.gauge(
                MetricNames.NUM_REGISTERED_TASK_MANAGERS, () -> (long) taskExecutors.size());
    }
    /**
     * Drops all registration bookkeeping (job managers and task executors) and clears the job
     * leader id service.
     */
    private void clearStateInternal() {
        jobManagerRegistrations.clear();
        jmResourceIdRegistrations.clear();
        taskExecutors.clear();
        try {
            jobLeaderIdService.clear();
        } catch (Exception e) {
            // failure to clear leaves inconsistent leader-id state behind => treat as fatal
            onFatalError(
                    new ResourceManagerException(
                            "Could not properly clear the job leader id service.", e));
        }
    }
    /**
     * This method should be called by the framework once it detects that a currently registered job
     * manager has failed.
     *
     * @param jobId identifying the job whose leader shall be disconnected.
     * @param resourceRequirementHandling indicating how existing resource requirements for the
     * corresponding job should be handled
     * @param cause The exception which cause the JobManager failed.
     */
    protected void closeJobManagerConnection(
            JobID jobId, ResourceRequirementHandling resourceRequirementHandling, Exception cause) {
        JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.remove(jobId);
        if (jobManagerRegistration != null) {
            final ResourceID jobManagerResourceId =
                    jobManagerRegistration.getJobManagerResourceID();
            final JobMasterGateway jobMasterGateway = jobManagerRegistration.getJobManagerGateway();
            final JobMasterId jobMasterId = jobManagerRegistration.getJobMasterId();
            log.info(
                    "Disconnect job manager {}@{} for job {} from the resource manager.",
                    jobMasterId,
                    jobMasterGateway.getAddress(),
                    jobId);
            // stop heartbeating and drop the reverse (resource id -> registration) mapping
            jobManagerHeartbeatManager.unmonitorTarget(jobManagerResourceId);
            jmResourceIdRegistrations.remove(jobManagerResourceId);
            if (resourceRequirementHandling == ResourceRequirementHandling.CLEAR) {
                slotManager.clearResourceRequirements(jobId);
            }
            // finally tell the JobMaster that this ResourceManager disconnects it
            jobMasterGateway.disconnectResourceManager(getFencingToken(), cause);
        } else {
            log.debug("There was no registered job manager for job {}.", jobId);
        }
    }
    /**
     * This method should be called by the framework once it detects that a currently registered
     * task executor has failed.
     *
     * @param resourceID Id of the TaskManager that has failed.
     * @param cause The exception which cause the TaskManager failed.
     * @return The {@link WorkerType} of the closed connection, or empty if already removed.
     */
    protected Optional<WorkerType> closeTaskManagerConnection(
            final ResourceID resourceID, final Exception cause) {
        // stop heartbeating first so no further timeout fires for this connection
        taskManagerHeartbeatManager.unmonitorTarget(resourceID);
        WorkerRegistration<WorkerType> workerRegistration = taskExecutors.remove(resourceID);
        if (workerRegistration != null) {
            log.info(
                    "Closing TaskExecutor connection {} because: {}",
                    resourceID.getStringWithMetadata(),
                    cause.getMessage());
            // release tracked slots and cluster partitions, then tell the TaskExecutor itself
            slotManager.unregisterTaskManager(workerRegistration.getInstanceID(), cause);
            clusterPartitionTracker.processTaskExecutorShutdown(resourceID);
            workerRegistration.getTaskExecutorGateway().disconnectResourceManager(cause);
        } else {
            log.debug(
                    "No open TaskExecutor connection {}. Ignoring close TaskExecutor connection. Closing reason was: {}",
                    resourceID.getStringWithMetadata(),
                    cause.getMessage());
        }
        return Optional.ofNullable(workerRegistration).map(WorkerRegistration::getWorker);
    }
    /**
     * Removes the job from the job leader id service and, if a JobManager is still registered for
     * it, closes that connection and clears the job's resource requirements.
     */
    protected void removeJob(JobID jobId, Exception cause) {
        try {
            jobLeaderIdService.removeJob(jobId);
        } catch (Exception e) {
            // best effort: log and continue so the connection is still closed below
            log.warn(
                    "Could not properly remove the job {} from the job leader id service.",
                    jobId,
                    e);
        }
        if (jobManagerRegistrations.containsKey(jobId)) {
            closeJobManagerConnection(jobId, ResourceRequirementHandling.CLEAR, cause);
        }
    }
protected void jobLeaderLostLeadership(JobID jobId, JobMasterId oldJobMasterId) {
if (jobManagerRegistrations.containsKey(jobId)) {
JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.get(jobId);
if (Objects.equals(jobManagerRegistration.getJobMasterId(), oldJobMasterId)) {
closeJobManagerConnection(
jobId,
ResourceRequirementHandling.RETAIN,
new Exception("Job leader lost leadership."));
} else {
log.debug(
"Discarding job leader lost leadership, because a new job leader was found for job {}. ",
jobId);
}
} else {
log.debug(
"Discard job leader lost leadership for outdated leader {} for job {}.",
oldJobMasterId,
jobId);
}
}
protected void releaseResource(InstanceID instanceId, Exception cause) {
WorkerType worker = null;
for (Map.Entry<ResourceID, WorkerRegistration<WorkerType>> entry :
taskExecutors.entrySet()) {
if (entry.getValue().getInstanceID().equals(instanceId)) {
worker = entry.getValue().getWorker();
break;
}
}
if (worker != null) {
if (stopWorker(worker)) {
closeTaskManagerConnection(worker.getResourceID(), cause);
} else {
log.debug(
"Worker {} could not be stopped.",
worker.getResourceID().getStringWithMetadata());
}
} else {
slotManager.unregisterTaskManager(instanceId, cause);
}
}
    /** How a disconnecting job's resource requirements should be treated. */
    private enum ResourceRequirementHandling {
        // keep the requirements (e.g. a new leader is expected to take over the job)
        RETAIN,
        // drop the requirements (the job is being removed)
        CLEAR
    }
    /**
     * Notifies the ResourceManager that a fatal error has occurred and it cannot proceed.
     *
     * @param t The exception describing the fatal error
     */
    protected void onFatalError(Throwable t) {
        try {
            log.error("Fatal error occurred in ResourceManager.", t);
        } catch (Throwable ignored) {
            // logging must never prevent the fatal-error handler from being invoked
        }
        fatalErrorHandler.onFatalError(t);
    }
    /** Creates and starts the heartbeat managers for TaskManager and JobManager monitoring. */
    private void startHeartbeatServices() {
        taskManagerHeartbeatManager =
                heartbeatServices.createHeartbeatManagerSender(
                        resourceId,
                        new TaskManagerHeartbeatListener(),
                        getMainThreadExecutor(),
                        log);
        jobManagerHeartbeatManager =
                heartbeatServices.createHeartbeatManagerSender(
                        resourceId,
                        new JobManagerHeartbeatListener(),
                        getMainThreadExecutor(),
                        log);
    }
    /** Stops both heartbeat managers (TaskManager and JobManager monitoring). */
    private void stopHeartbeatServices() {
        taskManagerHeartbeatManager.stop();
        jobManagerHeartbeatManager.stop();
    }
    /**
     * Initializes the framework specific components.
     *
     * @throws ResourceManagerException which occurs during initialization and causes the resource
     * manager to fail.
     */
    protected abstract void initialize() throws ResourceManagerException;
    /**
     * Terminates the framework specific components.
     *
     * @throws Exception which occurs during termination.
     */
    protected abstract void terminate() throws Exception;
    /**
     * The framework specific code to deregister the application. This should report the
     * application's final status and shut down the resource manager cleanly.
     *
     * <p>This method also needs to make sure all pending containers that are not registered yet are
     * returned.
     *
     * @param finalStatus The application status to report.
     * @param optionalDiagnostics A diagnostics message or {@code null}.
     * @throws ResourceManagerException if the application could not be shut down.
     */
    protected abstract void internalDeregisterApplication(
            ApplicationStatus finalStatus, @Nullable String optionalDiagnostics)
            throws ResourceManagerException;
    /**
     * Allocates a resource using the worker resource specification.
     *
     * @param workerResourceSpec workerResourceSpec specifies the size of the to be allocated
     * resource
     * @return whether the resource can be allocated
     */
    @VisibleForTesting
    public abstract boolean startNewWorker(WorkerResourceSpec workerResourceSpec);
    /**
     * Callback when a worker was started.
     *
     * @param resourceID The worker resource id
     * @return the started worker, or {@code null} if the framework does not recognize the given
     *     resource id (the registration is then rejected)
     */
    protected abstract WorkerType workerStarted(ResourceID resourceID);
    /**
     * Stops the given worker.
     *
     * @param worker The worker.
     * @return True if the worker was stopped, otherwise false
     */
    public abstract boolean stopWorker(WorkerType worker);
    /**
     * Sets whether the {@link SlotManager} should fail slot requests that can never be fulfilled.
     *
     * @param failUnfulfillableRequest whether to fail unfulfillable requests
     */
    protected void setFailUnfulfillableRequest(boolean failUnfulfillableRequest) {
        slotManager.setFailUnfulfillableRequest(failUnfulfillableRequest);
    }
    /** Sends heartbeat requests from the ResourceManager to a JobMaster. */
    private static final class JobMasterHeartbeatSender extends HeartbeatSender<Void> {
        private final JobMasterGateway jobMasterGateway;
        private JobMasterHeartbeatSender(JobMasterGateway jobMasterGateway) {
            this.jobMasterGateway = jobMasterGateway;
        }
        @Override
        public CompletableFuture<Void> requestHeartbeat(ResourceID resourceID, Void payload) {
            return jobMasterGateway.heartbeatFromResourceManager(resourceID);
        }
    }
    /** Sends heartbeat requests from the ResourceManager to a TaskExecutor. */
    private static final class TaskExecutorHeartbeatSender extends HeartbeatSender<Void> {
        private final TaskExecutorGateway taskExecutorGateway;
        private TaskExecutorHeartbeatSender(TaskExecutorGateway taskExecutorGateway) {
            this.taskExecutorGateway = taskExecutorGateway;
        }
        @Override
        public CompletableFuture<Void> requestHeartbeat(ResourceID resourceID, Void payload) {
            return taskExecutorGateway.heartbeatFromResourceManager(resourceID);
        }
    }
private class ResourceActionsImpl implements ResourceActions {
@Override
public void releaseResource(InstanceID instanceId, Exception cause) {
validateRunsInMainThread();
ResourceManager.this.releaseResource(instanceId, cause);
}
@Override
public boolean allocateResource(WorkerResourceSpec workerResourceSpec) {
validateRunsInMainThread();
return startNewWorker(workerResourceSpec);
}
@Override
public void notifyAllocationFailure(
JobID jobId, AllocationID allocationId, Exception cause) {
validateRunsInMainThread();
JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.get(jobId);
if (jobManagerRegistration != null) {
jobManagerRegistration
.getJobManagerGateway()
.notifyAllocationFailure(allocationId, cause);
}
}
@Override
public void notifyNotEnoughResourcesAvailable(
JobID jobId, Collection<ResourceRequirement> acquiredResources) {
validateRunsInMainThread();
JobManagerRegistration jobManagerRegistration = jobManagerRegistrations.get(jobId);
if (jobManagerRegistration != null) {
jobManagerRegistration
.getJobManagerGateway()
.notifyNotEnoughResourcesAvailable(acquiredResources);
}
}
}
private class JobLeaderIdActionsImpl implements JobLeaderIdActions {
@Override
public void jobLeaderLostLeadership(final JobID jobId, final JobMasterId oldJobMasterId) {
runAsync(
new Runnable() {
@Override
public void run() {
ResourceManager.this.jobLeaderLostLeadership(jobId, oldJobMasterId);
}
});
}
@Override
public void notifyJobTimeout(final JobID jobId, final UUID timeoutId) {
runAsync(
new Runnable() {
@Override
public void run() {
if (jobLeaderIdService.isValidTimeout(jobId, timeoutId)) {
removeJob(
jobId,
new Exception(
"Job " + jobId + "was removed because of timeout"));
}
}
});
}
@Override
public void handleError(Throwable error) {
onFatalError(error);
}
}
    /** Heartbeat listener for TaskManager heartbeats; also consumes the slot/partition payload. */
    private class TaskManagerHeartbeatListener
            implements HeartbeatListener<TaskExecutorHeartbeatPayload, Void> {
        @Override
        public void notifyHeartbeatTimeout(final ResourceID resourceID) {
            final String message =
                    String.format(
                            "The heartbeat of TaskManager with id %s timed out.",
                            resourceID.getStringWithMetadata());
            log.info(message);
            handleTaskManagerConnectionLoss(resourceID, new TimeoutException(message));
        }
        // Closes the connection and, if a worker was still registered, asks the framework to stop it.
        private void handleTaskManagerConnectionLoss(ResourceID resourceID, Exception cause) {
            validateRunsInMainThread();
            closeTaskManagerConnection(resourceID, cause)
                    .ifPresent(ResourceManager.this::stopWorker);
        }
        @Override
        public void notifyTargetUnreachable(ResourceID resourceID) {
            final String message =
                    String.format(
                            "TaskManager with id %s is no longer reachable.",
                            resourceID.getStringWithMetadata());
            log.info(message);
            handleTaskManagerConnectionLoss(resourceID, new ResourceManagerException(message));
        }
        @Override
        public void reportPayload(
                final ResourceID resourceID, final TaskExecutorHeartbeatPayload payload) {
            validateRunsInMainThread();
            final WorkerRegistration<WorkerType> workerRegistration = taskExecutors.get(resourceID);
            if (workerRegistration == null) {
                log.debug(
                        "Received slot report from TaskManager {} which is no longer registered.",
                        resourceID.getStringWithMetadata());
            } else {
                InstanceID instanceId = workerRegistration.getInstanceID();
                // forward slot report and cluster partition report to their respective trackers
                slotManager.reportSlotStatus(instanceId, payload.getSlotReport());
                clusterPartitionTracker.processTaskExecutorClusterPartitionReport(
                        resourceID, payload.getClusterPartitionReport());
            }
        }
        @Override
        public Void retrievePayload(ResourceID resourceID) {
            // heartbeats from the ResourceManager to TaskManagers carry no payload
            return null;
        }
    }
private class JobManagerHeartbeatListener implements HeartbeatListener<Void, Void> {
@Override
public void notifyHeartbeatTimeout(final ResourceID resourceID) {
final String message =
String.format(
"The heartbeat of JobManager with id %s timed out.",
resourceID.getStringWithMetadata());
log.info(message);
handleJobManagerConnectionLoss(resourceID, new TimeoutException(message));
}
private void handleJobManagerConnectionLoss(ResourceID resourceID, Exception cause) {
validateRunsInMainThread();
if (jmResourceIdRegistrations.containsKey(resourceID)) {
JobManagerRegistration jobManagerRegistration =
jmResourceIdRegistrations.get(resourceID);
if (jobManagerRegistration != null) {
closeJobManagerConnection(
jobManagerRegistration.getJobID(),
ResourceRequirementHandling.RETAIN,
cause);
}
}
}
@Override
public void notifyTargetUnreachable(ResourceID resourceID) {
final String message =
String.format(
"JobManager with id %s is no longer reachable.",
resourceID.getStringWithMetadata());
log.info(message);
handleJobManagerConnectionLoss(resourceID, new ResourceManagerException(message));
}
@Override
public void reportPayload(ResourceID resourceID, Void payload) {
}
@Override
public Void retrievePayload(ResourceID resourceID) {
return null;
}
}
    /** Returns the number of required workers per {@link WorkerResourceSpec} from the slot manager. */
    protected Map<WorkerResourceSpec, Integer> getRequiredResources() {
        return slotManager.getRequiredResources();
    }
} |
Thanks @iemejia . Do suggest if there any other changes needed. | public void finishBundle(FinishBundleContext context) throws Exception {
Multimap<BoundedWindow, KV<String, String>> kvs = fetchAndFlush();
for (BoundedWindow w : kvs.keySet()) {
for (KV<String, String> kv : kvs.get(w)) {
context.output(kv, w.maxTimestamp(), w);
}
}
} | Multimap<BoundedWindow, KV<String, String>> kvs = fetchAndFlush(); | public void finishBundle(FinishBundleContext context) throws Exception {
Multimap<BoundedWindow, KV<String, String>> kvs = fetchAndFlush();
for (BoundedWindow w : kvs.keySet()) {
for (KV<String, String> kv : kvs.get(w)) {
context.output(kv, w.maxTimestamp(), w);
}
}
} | class ReadFn extends BaseReadFn<KV<String, String>> {
    // Keys seen in the current bundle, grouped by the window they arrived in; (re)created per bundle.
    @Nullable transient Multimap<BoundedWindow, String> bundles = null;
    // Number of keys buffered since the last flush; compared against batchSize.
    @Nullable AtomicInteger batchCount = null;
    // Maximum number of keys to buffer before issuing a single MGET against Redis.
    private final int batchSize;
    @StartBundle
    public void startBundle(StartBundleContext context) {
        bundles = ArrayListMultimap.create();
        batchCount = new AtomicInteger();
    }
    ReadFn(RedisConnectionConfiguration connectionConfiguration, int batchSize) {
        super(connectionConfiguration);
        this.batchSize = batchSize;
    }
    private int getBatchSize() {
        return batchSize;
    }
    // Buffers the incoming key under its window; once more than batchSize keys are buffered,
    // flushes the batch and emits the fetched key/value pairs immediately.
    @ProcessElement
    public void processElement(ProcessContext processContext, BoundedWindow window)
            throws Exception {
        String key = processContext.element();
        bundles.put(window, key);
        if (batchCount.incrementAndGet() > getBatchSize()) {
            Multimap<BoundedWindow, KV<String, String>> kvs = fetchAndFlush();
            for (BoundedWindow w : kvs.keySet()) {
                for (KV<String, String> kv : kvs.get(w)) {
                    processContext.output(kv);
                }
            }
        }
    }
    // Issues one MGET per window for all buffered keys, drops keys absent in Redis
    // (null results), and resets the buffer and counter for the next batch.
    private Multimap<BoundedWindow, KV<String, String>> fetchAndFlush() {
        Multimap<BoundedWindow, KV<String, String>> kvs = ArrayListMultimap.create();
        for (BoundedWindow w : bundles.keySet()) {
            String[] keys = new String[bundles.get(w).size()];
            bundles.get(w).toArray(keys);
            List<String> results = jedis.mget(keys);
            for (int i = 0; i < results.size(); i++) {
                if (results.get(i) != null) {
                    kvs.put(w, KV.of(keys[i], results.get(i)));
                }
            }
        }
        bundles = ArrayListMultimap.create();
        batchCount.set(0);
        return kvs;
    }
@FinishBundle
} | class ReadFn extends BaseReadFn<KV<String, String>> {
    // Keys seen in the current bundle, grouped by the window they arrived in; (re)created per bundle.
    @Nullable transient Multimap<BoundedWindow, String> bundles = null;
    // Number of keys buffered since the last flush; compared against batchSize.
    @Nullable AtomicInteger batchCount = null;
    // Maximum number of keys to buffer before issuing a single MGET against Redis.
    private final int batchSize;
    @StartBundle
    public void startBundle(StartBundleContext context) {
        bundles = ArrayListMultimap.create();
        batchCount = new AtomicInteger();
    }
    ReadFn(RedisConnectionConfiguration connectionConfiguration, int batchSize) {
        super(connectionConfiguration);
        this.batchSize = batchSize;
    }
    private int getBatchSize() {
        return batchSize;
    }
    // Buffers the incoming key under its window; once more than batchSize keys are buffered,
    // flushes the batch and emits the fetched key/value pairs immediately.
    @ProcessElement
    public void processElement(ProcessContext processContext, BoundedWindow window)
            throws Exception {
        String key = processContext.element();
        bundles.put(window, key);
        if (batchCount.incrementAndGet() > getBatchSize()) {
            Multimap<BoundedWindow, KV<String, String>> kvs = fetchAndFlush();
            for (BoundedWindow w : kvs.keySet()) {
                for (KV<String, String> kv : kvs.get(w)) {
                    processContext.output(kv);
                }
            }
        }
    }
    // Issues one MGET per window for all buffered keys, drops keys absent in Redis
    // (null results), and resets the buffer and counter for the next batch.
    private Multimap<BoundedWindow, KV<String, String>> fetchAndFlush() {
        Multimap<BoundedWindow, KV<String, String>> kvs = ArrayListMultimap.create();
        for (BoundedWindow w : bundles.keySet()) {
            String[] keys = new String[bundles.get(w).size()];
            bundles.get(w).toArray(keys);
            List<String> results = jedis.mget(keys);
            for (int i = 0; i < results.size(); i++) {
                if (results.get(i) != null) {
                    kvs.put(w, KV.of(keys[i], results.get(i)));
                }
            }
        }
        bundles = ArrayListMultimap.create();
        batchCount.set(0);
        return kvs;
    }
@FinishBundle
} |
Got it. I have changed the implementation of testTotalAttemptsPerUpload, and updated the pr. | public UploadTasksResult upload(Collection<UploadTask> tasks) throws IOException {
Map<UploadTask, Map<StateChangeSet, Long>> map = new HashMap<>();
try {
TimeUnit.MILLISECONDS.sleep(latency);
} catch (InterruptedException ignored) {
Thread.currentThread().interrupt();
}
for (UploadTask uploadTask : tasks) {
map.put(
uploadTask,
uploadTask.changeSets.stream()
.collect(Collectors.toMap(Function.identity(), ign -> 0L)));
}
return new UploadTasksResult(map, new EmptyStreamStateHandle());
} | TimeUnit.MILLISECONDS.sleep(latency); | public UploadTasksResult upload(Collection<UploadTask> tasks) throws IOException {
Map<UploadTask, Map<StateChangeSet, Long>> map = new HashMap<>();
for (UploadTask uploadTask : tasks) {
int currentAttempt = 1 + attemptsPerTask.getOrDefault(uploadTask, 0);
if (currentAttempt == maxAttempts) {
attemptsPerTask.remove(uploadTask);
map.put(
uploadTask,
uploadTask.changeSets.stream()
.collect(Collectors.toMap(Function.identity(), ign -> 0L)));
} else {
attemptsPerTask.put(uploadTask, currentAttempt);
throw new IOException();
}
}
return new UploadTasksResult(map, new EmptyStreamStateHandle());
} | class MaxAttemptUploader implements StateChangeUploader {
    // Per-task count of upload attempts made so far (test bookkeeping).
    private final Map<UploadTask, Integer> attemptsPerTask;
    // Attempt number on which upload() stops failing — NOTE(review): confirm against upload().
    private final int maxAttempts;
    public MaxAttemptUploader(int maxAttempts) {
        this.maxAttempts = maxAttempts;
        this.attemptsPerTask = new HashMap<>();
    }
@Override
    @Override
    public void close() {
        // drop all attempt bookkeeping
        attemptsPerTask.clear();
    }
} | class MaxAttemptUploader implements StateChangeUploader {
    // Per-task count of upload attempts made so far (test bookkeeping).
    private final Map<UploadTask, Integer> attemptsPerTask;
    // Attempt number on which upload() stops failing — NOTE(review): confirm against upload().
    private final int maxAttempts;
    public MaxAttemptUploader(int maxAttempts) {
        this.maxAttempts = maxAttempts;
        this.attemptsPerTask = new HashMap<>();
    }
@Override
    @Override
    public void close() {
        // drop all attempt bookkeeping
        attemptsPerTask.clear();
    }
} |
The overload this is calling is checking for null. So, it's not required to check here again. | public EventData(BinaryData body) {
this(Objects.requireNonNull(body, "'body' cannot be null."), new SystemProperties(), Context.NONE);
} | this(Objects.requireNonNull(body, "'body' cannot be null."), new SystemProperties(), Context.NONE); | public EventData(BinaryData body) {
this(body, new SystemProperties(), Context.NONE);
} | class EventData {
/*
* These are properties owned by the service and set when a message is received.
*/
static final Set<String> RESERVED_SYSTEM_PROPERTIES;
private final Map<String, Object> properties;
private final BinaryData body;
private final SystemProperties systemProperties;
private Context context;
static {
final Set<String> properties = new HashSet<>();
properties.add(OFFSET_ANNOTATION_NAME.getValue());
properties.add(PARTITION_KEY_ANNOTATION_NAME.getValue());
properties.add(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
properties.add(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
properties.add(PUBLISHER_ANNOTATION_NAME.getValue());
RESERVED_SYSTEM_PROPERTIES = Collections.unmodifiableSet(properties);
}
    /**
     * Creates an event containing the {@code body}.
     *
     * @param body The data to set for this event.
     * @throws NullPointerException if {@code body} is {@code null}.
     */
    public EventData(byte[] body) {
        // Null-check eagerly so the NPE names 'body' before delegating to the BinaryData constructor.
        this(BinaryData.fromBytes(Objects.requireNonNull(body, "'body' cannot be null.")));
    }
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(ByteBuffer body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").array());
}
    /**
     * Creates an event by encoding the {@code body} using UTF-8 charset.
     *
     * @param body The string that will be UTF-8 encoded to create an event.
     * @throws NullPointerException if {@code body} is {@code null}.
     */
    public EventData(String body) {
        // Always UTF-8, independent of the platform default charset.
        this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8));
    }
/**
* Creates an event with the provided {@link BinaryData} as payload.
*
* @param body The {@link BinaryData} payload for this event.
*/
    /**
     * Creates an event with the given {@code body}, system properties and context.
     *
     * @param body The data to set for this event.
     * @param systemProperties System properties set by message broker for this event.
     * @param context A specified key-value pair of type {@link Context}.
     * @throws NullPointerException if {@code body}, {@code systemProperties}, or {@code context} is {@code null}.
     */
    EventData(BinaryData body, SystemProperties systemProperties, Context context) {
        this.body = Objects.requireNonNull(body, "'body' cannot be null.");
        this.context = Objects.requireNonNull(context, "'context' cannot be null.");
        this.systemProperties = Objects.requireNonNull(systemProperties, "'systemProperties' cannot be null.");
        // application properties start out empty; callers populate them via getProperties()
        this.properties = new HashMap<>();
    }
    /**
     * Gets the set of free-form event properties which may be used for passing metadata associated with the event with
     * the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate
     * serialization hints that tell consumers how to deserialize the event body.
     * (NOTE(review): the original sentence was truncated mid-{@code @link}; confirm the intended reference.)
     *
     * <p><strong>Adding serialization hint using {@code getProperties()}</strong></p>
     * <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p>
     *
     * {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties}
     *
     * @return Application properties associated with this {@link EventData}.
     */
    public Map<String, Object> getProperties() {
        return properties;
    }
    /**
     * Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are
     * only present on a <b>received</b> {@link EventData}.
     *
     * (NOTE(review): the backing field is required non-null in the constructor, so the value returned here is
     * never {@code null}; for events not received from the service it may instead be empty — confirm.)
     *
     * @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}.
     * {@code null} if the {@link EventData} is not received from the Event Hubs service.
     */
    public Map<String, Object> getSystemProperties() {
        return systemProperties;
    }
    /**
     * Gets the actual payload/data wrapped by EventData.
     *
     * <p>
     * If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of
     * {@link #getProperties()} to convey hints about how the binary data should be deserialized.
     * </p>
     *
     * @return A byte array representing the data.
     */
    public byte[] getBody() {
        return body.toBytes();
    }
/**
 * Returns event data as UTF-8 decoded string.
 *
 * @return UTF-8 decoded string representation of the event data.
 */
public String getBodyAsString() {
    // Decode explicitly with UTF-8 rather than the platform default charset.
    return new String(body.toBytes(), UTF_8);
}
/**
 * Returns the {@link BinaryData} payload associated with this event.
 *
 * @return the {@link BinaryData} payload associated with this event.
 */
public BinaryData getBodyAsBinaryData() {
    // Returns the payload wrapper directly, without the copy made by getBody()/getBodyAsString().
    return body;
}
/**
 * Gets the offset of the event when it was received from the associated Event Hub partition. This is only present
 * on a <b>received</b> {@link EventData}.
 *
 * @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData}
 * was not received from Event Hubs service.
 */
public Long getOffset() {
    // Delegates to the typed view extracted from the broker annotations (null if never received).
    return systemProperties.getOffset();
}
/**
 * Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was
 * used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b>
 * {@link EventData}.
 *
 * @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event
 * Hubs service or there was no partition key set when the event was sent to the Event Hub.
 */
public String getPartitionKey() {
    // Null when the sender did not specify a partition key or the event is locally created.
    return systemProperties.getPartitionKey();
}
/**
 * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a
 * <b>received</b> {@link EventData}.
 *
 * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData}
 * was not received from Event Hubs service.
 */
public Instant getEnqueuedTime() {
    // Delegates to the typed view extracted from the broker annotations (null if never received).
    return systemProperties.getEnqueuedTime();
}
/**
 * Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This
 * is unique for every message received in the Event Hub partition. This is only present on a <b>received</b>
 * {@link EventData}.
 *
 * @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event
 * Hubs service.
 */
public Long getSequenceNumber() {
    // Delegates to the typed view extracted from the broker annotations (null if never received).
    return systemProperties.getSequenceNumber();
}
/**
 * {@inheritDoc}
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    // getClass() comparison (rather than instanceof) keeps equals symmetric in the presence
    // of subclasses.
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    EventData eventData = (EventData) o;
    // Equality is defined solely by the payload bytes; application and system properties are
    // deliberately ignored. NOTE(review): toBytes() materialises both payloads on every
    // comparison -- fine for typical event sizes, worth confirming for very large payloads.
    return Arrays.equals(body.toBytes(), eventData.body.toBytes());
}
/**
 * {@inheritDoc}
 */
@Override
public int hashCode() {
    // Consistent with equals(): both derive from the payload bytes only.
    return Arrays.hashCode(body.toBytes());
}
/**
 * A specified key-value pair of type {@link Context} to set additional information on the event.
 *
 * @return the {@link Context} object set on the event
 */
Context getContext() {
    // Package-private: the context is consumed internally by the library, not exposed to users.
    return context;
}
/**
 * Adds a new key value pair to the existing context on Event Data.
 *
 * @param key The key for this context object
 * @param value The value for this context object.
 * @throws NullPointerException if {@code key} or {@code value} is null.
 * @return The updated {@link EventData}.
 */
public EventData addContext(String key, Object value) {
    Objects.requireNonNull(key, "The 'key' parameter cannot be null.");
    Objects.requireNonNull(value, "The 'value' parameter cannot be null.");
    // Context is immutable; addData returns a new instance, so the field must be re-assigned.
    this.context = context.addData(key, value);
    // Fluent style: return this to allow chained configuration.
    return this;
}
/**
 * A collection of properties populated by Azure Event Hubs service.
 *
 * <p>Extends {@link HashMap} so callers can read all raw broker annotations, while the well-known
 * annotations (offset, partition key, enqueued time, sequence number) are additionally exposed
 * through strongly-typed accessors.</p>
 */
static class SystemProperties extends HashMap<String, Object> {
    private static final long serialVersionUID = -2827050124966993723L;
    // Typed views of the well-known broker annotations; all null for locally-created events.
    private final Long offset;
    private final String partitionKey;
    private final Instant enqueuedTime;
    private final Long sequenceNumber;

    // Represents an event that was not received from the service: no broker metadata exists.
    SystemProperties() {
        super();
        offset = null;
        partitionKey = null;
        enqueuedTime = null;
        sequenceNumber = null;
    }

    // Builds the typed views from a raw annotation map. Each well-known annotation is removed,
    // converted to its strongly-typed form, and re-inserted under the same key so that map
    // consumers observe the converted value rather than the wire representation.
    SystemProperties(final Map<String, Object> map) {
        super(map);
        // Optional annotation: only present when the sender specified a partition key.
        this.partitionKey = removeSystemProperty(PARTITION_KEY_ANNOTATION_NAME.getValue());
        final String offset = removeSystemProperty(OFFSET_ANNOTATION_NAME.getValue());
        if (offset == null) {
            throw new IllegalStateException(String.format(Locale.US,
                "offset: %s should always be in map.", OFFSET_ANNOTATION_NAME.getValue()));
        }
        // The broker delivers the offset as a string; expose it as a Long.
        this.offset = Long.valueOf(offset);
        put(OFFSET_ANNOTATION_NAME.getValue(), this.offset);
        final Date enqueuedTimeValue = removeSystemProperty(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
        if (enqueuedTimeValue == null) {
            throw new IllegalStateException(String.format(Locale.US,
                "enqueuedTime: %s should always be in map.", ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()));
        }
        // Convert the legacy java.util.Date from the wire into a java.time.Instant.
        this.enqueuedTime = enqueuedTimeValue.toInstant();
        put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), this.enqueuedTime);
        final Long sequenceNumber = removeSystemProperty(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
        if (sequenceNumber == null) {
            throw new IllegalStateException(String.format(Locale.US,
                "sequenceNumber: %s should always be in map.", SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()));
        }
        this.sequenceNumber = sequenceNumber;
        put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), this.sequenceNumber);
    }

    /**
     * Gets the offset within the Event Hubs stream.
     *
     * @return The offset within the Event Hubs stream.
     */
    private Long getOffset() {
        return offset;
    }

    /**
     * Gets a partition key used for message partitioning. If it exists, this value was used to compute a hash to
     * select a partition to send the message to.
     *
     * @return A partition key for this Event Data.
     */
    private String getPartitionKey() {
        return partitionKey;
    }

    /**
     * Gets the time this event was enqueued in the Event Hub.
     *
     * @return The time this was enqueued in the service.
     */
    private Instant getEnqueuedTime() {
        return enqueuedTime;
    }

    /**
     * Gets the sequence number in the event stream for this event. This is unique for every message received in
     * the Event Hub. (Never throws: the constructor already validated the presence of the annotation.)
     *
     * @return Sequence number for this event.
     */
    private Long getSequenceNumber() {
        return sequenceNumber;
    }

    // Removes and returns the mapping for {@code key}, or null when absent. The unchecked cast
    // is localized here; callers must know the expected annotation value type.
    @SuppressWarnings("unchecked")
    private <T> T removeSystemProperty(final String key) {
        if (this.containsKey(key)) {
            return (T) (this.remove(key));
        }
        return null;
    }
}
} | class EventData {
/*
* These are properties owned by the service and set when a message is received.
*/
static final Set<String> RESERVED_SYSTEM_PROPERTIES;
private final Map<String, Object> properties;
private final BinaryData body;
private final SystemProperties systemProperties;
private Context context;
static {
final Set<String> properties = new HashSet<>();
properties.add(OFFSET_ANNOTATION_NAME.getValue());
properties.add(PARTITION_KEY_ANNOTATION_NAME.getValue());
properties.add(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
properties.add(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
properties.add(PUBLISHER_ANNOTATION_NAME.getValue());
RESERVED_SYSTEM_PROPERTIES = Collections.unmodifiableSet(properties);
}
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(byte[] body) {
this(BinaryData.fromBytes(Objects.requireNonNull(body, "'body' cannot be null.")));
}
/**
* Creates an event containing the {@code body}.
*
* @param body The data to set for this event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(ByteBuffer body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").array());
}
/**
* Creates an event by encoding the {@code body} using UTF-8 charset.
*
* @param body The string that will be UTF-8 encoded to create an event.
* @throws NullPointerException if {@code body} is {@code null}.
*/
public EventData(String body) {
this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8));
}
/**
* Creates an event with the provided {@link BinaryData} as payload.
*
* @param body The {@link BinaryData} payload for this event.
*/
/**
* Creates an event with the given {@code body}, system properties and context.
*
* @param body The data to set for this event.
* @param systemProperties System properties set by message broker for this event.
* @param context A specified key-value pair of type {@link Context}.
* @throws NullPointerException if {@code body}, {@code systemProperties}, or {@code context} is {@code null}.
*/
EventData(BinaryData body, SystemProperties systemProperties, Context context) {
this.body = Objects.requireNonNull(body, "'body' cannot be null.");
this.context = Objects.requireNonNull(context, "'context' cannot be null.");
this.systemProperties = Objects.requireNonNull(systemProperties, "'systemProperties' cannot be null.");
this.properties = new HashMap<>();
}
/**
* Gets the set of free-form event properties which may be used for passing metadata associated with the event with
* the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate
* serialization hints for the {@link
*
* <p><strong>Adding serialization hint using {@code getProperties()}</strong></p>
* <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p>
*
* {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties}
*
* @return Application properties associated with this {@link EventData}.
*/
public Map<String, Object> getProperties() {
return properties;
}
/**
* Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are
* only present on a <b>received</b> {@link EventData}.
*
* @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}.
* {@code null} if the {@link EventData} is not received from the Event Hubs service.
*/
public Map<String, Object> getSystemProperties() {
return systemProperties;
}
/**
* Gets the actual payload/data wrapped by EventData.
*
* <p>
* If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of
* {@link
* wish to deserialize the binary data.
* </p>
*
* @return A byte array representing the data.
*/
public byte[] getBody() {
return body.toBytes();
}
/**
* Returns event data as UTF-8 decoded string.
*
* @return UTF-8 decoded string representation of the event data.
*/
public String getBodyAsString() {
return new String(body.toBytes(), UTF_8);
}
/**
* Returns the {@link BinaryData} payload associated with this event.
*
* @return the {@link BinaryData} payload associated with this event.
*/
public BinaryData getBodyAsBinaryData() {
return body;
}
/**
* Gets the offset of the event when it was received from the associated Event Hub partition. This is only present
* on a <b>received</b> {@link EventData}.
*
* @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
public Long getOffset() {
return systemProperties.getOffset();
}
/**
* Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was
* used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b>
* {@link EventData}.
*
* @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event
* Hubs service or there was no partition key set when the event was sent to the Event Hub.
*/
public String getPartitionKey() {
return systemProperties.getPartitionKey();
}
/**
* Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a
* <b>received</b> {@link EventData}.
*
* @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData}
* was not received from Event Hubs service.
*/
public Instant getEnqueuedTime() {
return systemProperties.getEnqueuedTime();
}
/**
* Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This
* is unique for every message received in the Event Hub partition. This is only present on a <b>received</b>
* {@link EventData}.
*
* @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event
* Hubs service.
*/
public Long getSequenceNumber() {
return systemProperties.getSequenceNumber();
}
/**
* {@inheritDoc}
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EventData eventData = (EventData) o;
return Arrays.equals(body.toBytes(), eventData.body.toBytes());
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
return Arrays.hashCode(body.toBytes());
}
/**
* A specified key-value pair of type {@link Context} to set additional information on the event.
*
* @return the {@link Context} object set on the event
*/
Context getContext() {
return context;
}
/**
* Adds a new key value pair to the existing context on Event Data.
*
* @param key The key for this context object
* @param value The value for this context object.
* @throws NullPointerException if {@code key} or {@code value} is null.
* @return The updated {@link EventData}.
*/
public EventData addContext(String key, Object value) {
Objects.requireNonNull(key, "The 'key' parameter cannot be null.");
Objects.requireNonNull(value, "The 'value' parameter cannot be null.");
this.context = context.addData(key, value);
return this;
}
/**
* A collection of properties populated by Azure Event Hubs service.
*/
static class SystemProperties extends HashMap<String, Object> {
private static final long serialVersionUID = -2827050124966993723L;
private final Long offset;
private final String partitionKey;
private final Instant enqueuedTime;
private final Long sequenceNumber;
SystemProperties() {
super();
offset = null;
partitionKey = null;
enqueuedTime = null;
sequenceNumber = null;
}
SystemProperties(final Map<String, Object> map) {
super(map);
this.partitionKey = removeSystemProperty(PARTITION_KEY_ANNOTATION_NAME.getValue());
final String offset = removeSystemProperty(OFFSET_ANNOTATION_NAME.getValue());
if (offset == null) {
throw new IllegalStateException(String.format(Locale.US,
"offset: %s should always be in map.", OFFSET_ANNOTATION_NAME.getValue()));
}
this.offset = Long.valueOf(offset);
put(OFFSET_ANNOTATION_NAME.getValue(), this.offset);
final Date enqueuedTimeValue = removeSystemProperty(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue());
if (enqueuedTimeValue == null) {
throw new IllegalStateException(String.format(Locale.US,
"enqueuedTime: %s should always be in map.", ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()));
}
this.enqueuedTime = enqueuedTimeValue.toInstant();
put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), this.enqueuedTime);
final Long sequenceNumber = removeSystemProperty(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue());
if (sequenceNumber == null) {
throw new IllegalStateException(String.format(Locale.US,
"sequenceNumber: %s should always be in map.", SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()));
}
this.sequenceNumber = sequenceNumber;
put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), this.sequenceNumber);
}
/**
* Gets the offset within the Event Hubs stream.
*
* @return The offset within the Event Hubs stream.
*/
private Long getOffset() {
return offset;
}
/**
* Gets a partition key used for message partitioning. If it exists, this value was used to compute a hash to
* select a partition to send the message to.
*
* @return A partition key for this Event Data.
*/
private String getPartitionKey() {
return partitionKey;
}
/**
* Gets the time this event was enqueued in the Event Hub.
*
* @return The time this was enqueued in the service.
*/
private Instant getEnqueuedTime() {
return enqueuedTime;
}
/**
* Gets the sequence number in the event stream for this event. This is unique for every message received in the
* Event Hub.
*
* @return Sequence number for this event.
* @throws IllegalStateException if {@link SystemProperties} does not contain the sequence number in a retrieved
* event.
*/
private Long getSequenceNumber() {
return sequenceNumber;
}
@SuppressWarnings("unchecked")
private <T> T removeSystemProperty(final String key) {
if (this.containsKey(key)) {
return (T) (this.remove(key));
}
return null;
}
}
} |
I would recommend splitting this into two calls that are individually wrapped in `try-catch` blocks. Otherwise we won't stop the `ioExecutor` if `futureExecutor.shutdownNow()` fails for some reason. | public void shutdown() throws Exception {
// Shut down the JobMaster services. Each component is stopped in its own try-catch so a
// failure in one does not prevent the remaining components from being stopped; the first
// encountered error is rethrown with later ones attached as suppressed exceptions.
Throwable firstException = null;
try {
    futureExecutor.shutdownNow();
} catch (Throwable t) {
    firstException = t;
}
// Stop the I/O executor even if stopping the future executor failed.
try {
    ioExecutor.shutdownNow();
} catch (Throwable t) {
    firstException = ExceptionUtils.firstOrSuppressed(t, firstException);
}
try {
    shuffleMaster.close();
} catch (Throwable t) {
    firstException = ExceptionUtils.firstOrSuppressed(t, firstException);
}
libraryCacheManager.shutdown();
if (firstException != null) {
    ExceptionUtils.rethrowException(
        firstException, "Error while shutting down JobManager services");
}
} | ioExecutor.shutdownNow(); | public void shutdown() throws Exception {
// Shut down the JobMaster services, giving both executors a bounded grace period before
// forcing termination. The first encountered error is rethrown; later errors are attached
// as suppressed exceptions.
Throwable exception = null;
try {
    // Graceful shutdown waits up to SHUTDOWN_TIMEOUT for both pools, then forces shutdown.
    ExecutorUtils.gracefulShutdown(
        SHUTDOWN_TIMEOUT.toMillis(), TimeUnit.MILLISECONDS, futureExecutor, ioExecutor);
} catch (Throwable t) {
    exception = t;
}
try {
    shuffleMaster.close();
} catch (Throwable t) {
    // Keep the first failure as primary; record this one as suppressed.
    exception = ExceptionUtils.firstOrSuppressed(t, exception);
}
libraryCacheManager.shutdown();
if (exception != null) {
    ExceptionUtils.rethrowException(
        exception, "Error while shutting down JobManager services");
}
} | class JobManagerSharedServices {
private final ScheduledExecutorService futureExecutor;
private final ScheduledExecutorService ioExecutor;
private final LibraryCacheManager libraryCacheManager;
private final ShuffleMaster<?> shuffleMaster;
@Nonnull private final BlobWriter blobWriter;
public JobManagerSharedServices(
ScheduledExecutorService futureExecutor,
ScheduledExecutorService ioExecutor,
LibraryCacheManager libraryCacheManager,
ShuffleMaster<?> shuffleMaster,
@Nonnull BlobWriter blobWriter) {
this.futureExecutor = checkNotNull(futureExecutor);
this.ioExecutor = checkNotNull(ioExecutor);
this.libraryCacheManager = checkNotNull(libraryCacheManager);
this.shuffleMaster = checkNotNull(shuffleMaster);
this.blobWriter = blobWriter;
}
public ScheduledExecutorService getFutureExecutor() {
return futureExecutor;
}
public ScheduledExecutorService getIoExecutor() {
return ioExecutor;
}
public LibraryCacheManager getLibraryCacheManager() {
return libraryCacheManager;
}
public ShuffleMaster<?> getShuffleMaster() {
return shuffleMaster;
}
@Nonnull
public BlobWriter getBlobWriter() {
return blobWriter;
}
/**
* Shutdown the {@link JobMaster} services.
*
* <p>This method makes sure all services are closed or shut down, even when an exception
* occurred in the shutdown of one component. The first encountered exception is thrown, with
* successive exceptions added as suppressed exceptions.
*
* @throws Exception The first Exception encountered during shutdown.
*/
/**
 * Creates the shared JobManager services (executors, library cache manager, shuffle master)
 * from the given configuration.
 *
 * @param config cluster configuration used to size the thread pools and configure user-code
 *     class loading.
 * @param blobServer blob server backing the library cache and used as the {@link BlobWriter}.
 * @param fatalErrorHandler handler notified on fatal errors (e.g. user-code metaspace OOM).
 * @return the assembled {@link JobManagerSharedServices}.
 * @throws Exception if the shuffle service cannot be loaded or started.
 */
public static JobManagerSharedServices fromConfiguration(
        Configuration config, BlobServer blobServer, FatalErrorHandler fatalErrorHandler)
        throws Exception {
    checkNotNull(config);
    checkNotNull(blobServer);
    // User-code class-loading policy; parent-first patterns always resolve from the
    // framework class loader.
    final String classLoaderResolveOrder =
        config.getString(CoreOptions.CLASSLOADER_RESOLVE_ORDER);
    final String[] alwaysParentFirstLoaderPatterns =
        CoreOptions.getParentFirstLoaderPatterns(config);
    final boolean failOnJvmMetaspaceOomError =
        config.getBoolean(CoreOptions.FAIL_ON_USER_CLASS_LOADING_METASPACE_OOM);
    final boolean checkClassLoaderLeak =
        config.getBoolean(CoreOptions.CHECK_LEAKED_CLASSLOADER);
    final BlobLibraryCacheManager libraryCacheManager =
        new BlobLibraryCacheManager(
            blobServer,
            BlobLibraryCacheManager.defaultClassLoaderFactory(
                FlinkUserCodeClassLoaders.ResolveOrder.fromString(
                    classLoaderResolveOrder),
                alwaysParentFirstLoaderPatterns,
                // Only escalate metaspace OOMs in user code to the fatal error handler
                // when configured to do so.
                failOnJvmMetaspaceOomError ? fatalErrorHandler : null,
                checkClassLoaderLeak));
    // A configured pool size of 0 falls back to the number of available CPU cores.
    final int numJobManagerFutureThreads =
        config.getInteger(RestOptions.JOBMANAGER_FUTURE_THREADS);
    final ScheduledExecutorService futureExecutor =
        Executors.newScheduledThreadPool(
            (numJobManagerFutureThreads != 0)
                ? numJobManagerFutureThreads
                : Hardware.getNumberCPUCores(),
            new ExecutorThreadFactory("jobmanager-future"));
    final int numJobManagerIoThreads = config.getInteger(RestOptions.JOBMANAGER_IO_THREADS);
    final ScheduledExecutorService ioExecutor =
        Executors.newScheduledThreadPool(
            (numJobManagerIoThreads != 0)
                ? numJobManagerIoThreads
                : Hardware.getNumberCPUCores(),
            new ExecutorThreadFactory("jobmanager-io"));
    final ShuffleMasterContext shuffleMasterContext =
        new ShuffleMasterContextImpl(config, fatalErrorHandler);
    // The shuffle master must be started before being handed to the services bundle.
    final ShuffleMaster<?> shuffleMaster =
        ShuffleServiceLoader.loadShuffleServiceFactory(config)
            .createShuffleMaster(shuffleMasterContext);
    shuffleMaster.start();
    return new JobManagerSharedServices(
        futureExecutor, ioExecutor, libraryCacheManager, shuffleMaster, blobServer);
}
} | class JobManagerSharedServices {
private static final Duration SHUTDOWN_TIMEOUT = Duration.ofSeconds(10);
private final ScheduledExecutorService futureExecutor;
private final ExecutorService ioExecutor;
private final LibraryCacheManager libraryCacheManager;
private final ShuffleMaster<?> shuffleMaster;
@Nonnull private final BlobWriter blobWriter;
public JobManagerSharedServices(
ScheduledExecutorService futureExecutor,
ExecutorService ioExecutor,
LibraryCacheManager libraryCacheManager,
ShuffleMaster<?> shuffleMaster,
@Nonnull BlobWriter blobWriter) {
this.futureExecutor = checkNotNull(futureExecutor);
this.ioExecutor = checkNotNull(ioExecutor);
this.libraryCacheManager = checkNotNull(libraryCacheManager);
this.shuffleMaster = checkNotNull(shuffleMaster);
this.blobWriter = blobWriter;
}
public ScheduledExecutorService getFutureExecutor() {
return futureExecutor;
}
public Executor getIoExecutor() {
return ioExecutor;
}
public LibraryCacheManager getLibraryCacheManager() {
return libraryCacheManager;
}
public ShuffleMaster<?> getShuffleMaster() {
return shuffleMaster;
}
@Nonnull
public BlobWriter getBlobWriter() {
return blobWriter;
}
/**
* Shutdown the {@link JobMaster} services.
*
* <p>This method makes sure all services are closed or shut down, even when an exception
* occurred in the shutdown of one component. The first encountered exception is thrown, with
* successive exceptions added as suppressed exceptions.
*
* @throws Exception The first Exception encountered during shutdown.
*/
public static JobManagerSharedServices fromConfiguration(
Configuration config, BlobServer blobServer, FatalErrorHandler fatalErrorHandler)
throws Exception {
checkNotNull(config);
checkNotNull(blobServer);
final String classLoaderResolveOrder =
config.getString(CoreOptions.CLASSLOADER_RESOLVE_ORDER);
final String[] alwaysParentFirstLoaderPatterns =
CoreOptions.getParentFirstLoaderPatterns(config);
final boolean failOnJvmMetaspaceOomError =
config.getBoolean(CoreOptions.FAIL_ON_USER_CLASS_LOADING_METASPACE_OOM);
final boolean checkClassLoaderLeak =
config.getBoolean(CoreOptions.CHECK_LEAKED_CLASSLOADER);
final BlobLibraryCacheManager libraryCacheManager =
new BlobLibraryCacheManager(
blobServer,
BlobLibraryCacheManager.defaultClassLoaderFactory(
FlinkUserCodeClassLoaders.ResolveOrder.fromString(
classLoaderResolveOrder),
alwaysParentFirstLoaderPatterns,
failOnJvmMetaspaceOomError ? fatalErrorHandler : null,
checkClassLoaderLeak));
final int numberCPUCores = Hardware.getNumberCPUCores();
final int jobManagerFuturePoolSize =
config.getInteger(JobManagerOptions.JOB_MANAGER_FUTURE_POOL_SIZE, numberCPUCores);
final ScheduledExecutorService futureExecutor =
Executors.newScheduledThreadPool(
jobManagerFuturePoolSize, new ExecutorThreadFactory("jobmanager-future"));
final int jobManagerIoPoolSize =
config.getInteger(JobManagerOptions.JOB_MANAGER_IO_POOL_SIZE, numberCPUCores);
final ExecutorService ioExecutor =
Executors.newFixedThreadPool(
jobManagerIoPoolSize, new ExecutorThreadFactory("jobmanager-io"));
final ShuffleMasterContext shuffleMasterContext =
new ShuffleMasterContextImpl(config, fatalErrorHandler);
final ShuffleMaster<?> shuffleMaster =
ShuffleServiceLoader.loadShuffleServiceFactory(config)
.createShuffleMaster(shuffleMasterContext);
shuffleMaster.start();
return new JobManagerSharedServices(
futureExecutor, ioExecutor, libraryCacheManager, shuffleMaster, blobServer);
}
} |
And I don't want to add a new string field just for the rare case where this API is called. | public String getConnectionMode() {
// No diagnostics captured yet -> connection mode is unknown.
if (this.diagnostics == null) {
    return "";
}
// Scan captured diagnostics and report the connection mode from the first one that
// carries client-side request statistics with a client configuration attached.
for (CosmosDiagnostics singleDiagnostics : this.diagnostics) {
    Collection<ClientSideRequestStatistics> statsCollection =
        singleDiagnostics.getClientSideRequestStatistics();
    if (statsCollection == null || statsCollection.isEmpty()) {
        continue;
    }
    ClientSideRequestStatistics firstStats = statsCollection.iterator().next();
    if (firstStats.getDiagnosticsClientConfig() == null) {
        continue;
    }
    return firstStats.getDiagnosticsClientConfig().getConnectionMode().toString();
}
return "";
} | if (this.diagnostics == null) { | public String getConnectionMode() {
return this.connectionMode;
} | class CosmosDiagnosticsContext {
private final static ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor diagAccessor =
ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor();
private final static ObjectMapper mapper = Utils.getSimpleObjectMapper();
private final String spanName;
private final String accountName;
private final String endpoint;
private final String databaseName;
private final String collectionName;
private final ResourceType resourceType;
private final String resourceTypeString;
private final OperationType operationType;
private final String operationTypeString;
private final ConsistencyLevel consistencyLevel;
private final ConcurrentLinkedDeque<CosmosDiagnostics> diagnostics;
private final Integer maxItemCount;
private final CosmosDiagnosticsThresholds thresholds;
private final String operationId;
private final String trackingId;
private Throwable finalError;
private Instant startTime = null;
private Duration duration = null;
private int statusCode = 0;
private int subStatusCode = 0;
private final AtomicInteger actualItemCount = new AtomicInteger(-1);
private float totalRequestCharge = 0;
private int maxRequestSize = 0;
private int maxResponseSize = 0;
private String cachedRequestDiagnostics = null;
private final AtomicBoolean isCompleted = new AtomicBoolean(false);
private Double samplingRateSnapshot;
private ArrayList<CosmosDiagnosticsRequestInfo> requestInfo = null;
CosmosDiagnosticsContext(
String spanName,
String accountName,
String endpoint,
String databaseName,
String collectionName,
ResourceType resourceType,
OperationType operationType,
String operationId,
ConsistencyLevel consistencyLevel,
Integer maxItemCount,
CosmosDiagnosticsThresholds thresholds,
String trackingId) {
checkNotNull(spanName, "Argument 'spanName' must not be null.");
checkNotNull(accountName, "Argument 'accountName' must not be null.");
checkNotNull(endpoint, "Argument 'endpoint' must not be null.");
checkNotNull(resourceType, "Argument 'resourceType' must not be null.");
checkNotNull(operationType, "Argument 'operationType' must not be null.");
checkNotNull(consistencyLevel, "Argument 'consistencyLevel' must not be null.");
checkNotNull(thresholds, "Argument 'thresholds' must not be null.");
this.spanName = spanName;
this.accountName = accountName;
this.endpoint = endpoint;
this.databaseName = databaseName != null ? databaseName : "";
this.collectionName = collectionName != null ? collectionName : "";
this.resourceType = resourceType;
this.resourceTypeString = resourceType.toString();
this.operationType = operationType;
this.operationTypeString = operationType.toString();
this.operationId = operationId != null ? operationId : "";
this.diagnostics = new ConcurrentLinkedDeque<>();
this.consistencyLevel = consistencyLevel;
this.maxItemCount = maxItemCount;
this.thresholds = thresholds;
this.trackingId = trackingId;
}
/**
* The name of the account related to the operation
* @return the name of the account related to the operation
*/
public String getAccountName() {
return this.accountName;
}
String getEndpoint() { return this.endpoint; }
/**
* The name of the database related to the operation
* @return the name of the database related to the operation
*/
public String getDatabaseName() {
return this.databaseName;
}
/**
* The name of the container related to the operation
* @return the name of the collection related to the operation
*/
public String getContainerName() {
return this.collectionName;
}
/**
* The resource type of the operation
* @return the resource type of the operation
*/
public String getResourceType() {
return this.resourceTypeString;
}
ResourceType getResourceTypeInternal() {
return this.resourceType;
}
/**
* The operation type of the operation
* @return the operation type of the operation
*/
public String getOperationType() {
return this.operationTypeString;
}
/**
* The trackingId of a write operation. Will be null for read-/query- or feed operations or when non-idempotent
* writes are disabled for writes or only enabled without trackingId propagation.
* @return the trackingId of an operation
*/
public String getTrackingId() {
return this.trackingId;
}
/**
* A flag indicating whether the operation is a point operation or not.
* @return a flag indicating whether the operation is a point operation or not.
*/
public boolean isPointOperation() {
return this.operationType.isPointOperation();
}
OperationType getOperationTypeInternal() {
return this.operationType;
}
/**
* The operation identifier of the operation - this can be used to
* add a dimension for feed operations - like queries -
* so, metrics and diagnostics can be separated for different query types etc.
* @return the operation identifier of the operation
*/
public String getOperationId() {
return this.operationId;
}
/**
* The effective consistency level of the operation
* @return the effective consistency level of the operation
*/
public ConsistencyLevel getEffectiveConsistencyLevel() {
return this.consistencyLevel;
}
/**
* The max. number of items requested in a feed operation
* @return the max. number of items requested in a feed operation. Will be null for point operations.
*/
public Integer getMaxItemCount() {
return this.maxItemCount;
}
/**
 * The actual number of items returned by a feed operation.
 *
 * @return the actual number of items returned by a feed operation. Will be null for point
 * operations.
 */
public Integer getActualItemCount() {
    // A negative value is the sentinel for "never recorded" (e.g. point operations).
    final int currentCount = this.actualItemCount.get();
    return currentCount < 0 ? null : currentCount;
}
/**
* The span name as a logical identifier for an operation
* @return the span name as a logical identifier for an operation
*/
String getSpanName() {
return this.spanName;
}
/**
* Indicates whether the latency, request charge or payload size of the operation exceeded the given threshold
* @return a flag indicating whether the latency, request charge or payload size of the operation
* exceeded its threshold.
*/
public boolean isThresholdViolated() {
    // Thresholds can only be evaluated once the operation has finished (duration is set then).
    if (!this.isCompleted()) {
        return false;
    }
    // A failure per the configured failure condition always counts as a violation.
    if (this.thresholds.isFailureCondition(this.statusCode, this.subStatusCode)) {
        return true;
    }
    // The applicable latency threshold depends on whether this was a point operation
    // or a feed/query operation.
    if (this.operationType.isPointOperation()) {
        if (this.thresholds.getPointOperationLatencyThreshold().compareTo(this.duration) < 0) {
            return true;
        }
    } else {
        if (this.thresholds.getNonPointOperationLatencyThreshold().compareTo(this.duration) < 0) {
            return true;
        }
    }
    if (this.thresholds.getRequestChargeThreshold() < this.totalRequestCharge) {
        return true;
    }
    // Finally, compare the larger of request/response payload size against the payload threshold.
    return this.thresholds.getPayloadSizeThreshold() < Math.max(this.maxRequestSize, this.maxResponseSize);
}
// Records one service-interaction diagnostics entry for this operation and folds its payload
// sizes into the context-wide aggregates. Guarded by the spanName monitor (the context lock).
void addDiagnostics(CosmosDiagnostics cosmosDiagnostics) {
checkNotNull(cosmosDiagnostics, "Argument 'cosmosDiagnostics' must not be null.");
synchronized (this.spanName) {
// Backfill the sampling rate already captured for this context onto the new entry.
if (this.samplingRateSnapshot != null) {
diagAccessor.setSamplingRateSnapshot(cosmosDiagnostics, this.samplingRateSnapshot);
}
this.addRequestSize(diagAccessor.getRequestPayloadSizeInBytes(cosmosDiagnostics));
this.addResponseSize(diagAccessor.getTotalResponsePayloadSizeInBytes(cosmosDiagnostics));
this.diagnostics.add(cosmosDiagnostics);
// Invalidate lazily-built snapshots (json string and request-info list).
this.cachedRequestDiagnostics = null;
this.requestInfo = null;
// Back-reference so the diagnostics entry can navigate to its owning context.
cosmosDiagnostics.setDiagnosticsContext(this);
}
}
// Merges client-side request statistics from all recorded diagnostics (including any
// feed-response diagnostics) into a single de-duplicated collection.
Collection<ClientSideRequestStatistics> getDistinctCombinedClientSideRequestStatistics() {
    DistinctClientSideRequestStatisticsCollection merged =
        new DistinctClientSideRequestStatisticsCollection();

    for (CosmosDiagnostics current : this.getDiagnostics()) {
        merged.addAll(current.getClientSideRequestStatistics());

        FeedResponseDiagnostics feedDiagnostics = current.getFeedResponseDiagnostics();
        if (feedDiagnostics != null) {
            merged.addAll(feedDiagnostics.getClientSideRequestStatistics());
        }
    }

    return merged;
}
/**
 * The final status code of the operation (possibly after retries)
 * @return the final status code of the operation (possibly after retries)
 */
public int getStatusCode() {
return this.statusCode;
}
/**
 * The final sub-status code of the operation (possibly after retries)
 * @return the final sub-status code of the operation (possibly after retries)
 */
public int getSubStatusCode() {
return this.subStatusCode;
}
/**
 * The final error when the operation failed
 * @return the final error when the operation failed - may be {@code null} when no error was recorded
 */
public Throwable getFinalError() {
return this.finalError;
}
/**
 * The max. request payload size in bytes
 * @return the max. request payload size in bytes
 */
public int getMaxRequestPayloadSizeInBytes() {
return this.maxRequestSize;
}
/**
 * The max. response payload size in bytes.
 * @return the max. response payload size in bytes
 */
public int getMaxResponsePayloadSizeInBytes() {
return this.maxResponseSize;
}
/**
 * The total request charge across all retries.
 * @return the total request charge across all retries.
 */
public float getTotalRequestCharge() {
return this.totalRequestCharge;
}
/**
 * Returns the set of contacted regions
 * @return the set of contacted regions (empty when no diagnostics were recorded)
 */
public Set<String> getContactedRegionNames() {
    TreeSet<String> mergedRegions = new TreeSet<>();
    if (this.diagnostics != null) {
        for (CosmosDiagnostics current : this.diagnostics) {
            mergedRegions.addAll(current.getContactedRegionNames());
        }
    }
    return mergedRegions;
}
/**
 * Returns the system usage
 * NOTE: this information is not included in the json representation returned from {@link #toJson()} - it
 * is usually only relevant when thresholds are violated, in which case the entire diagnostics json-string is
 * included. Calling this method will lazily collect the system usage - which can be useful when writing
 * a custom {@link CosmosDiagnosticsHandler}
 * @return the system usage
 */
public Collection<String> getSystemUsage() {
TreeSet<String> systemUsage = new TreeSet<>();
// NOTE(review): unlike getContactedRegionNames() this returns null (not an empty collection)
// when no diagnostics exist - confirm callers handle the null.
if (this.diagnostics == null) {
return null;
}
for (CosmosDiagnostics d: this.diagnostics) {
// NOTE(review): addDiagnostics() sets each entry's context to this instance, so
// d.getDiagnosticsContext() may well be 'this' - in which case this call would recurse on
// itself indefinitely. Verify diagnostics can carry a different context before relying on this.
if (d.getDiagnosticsContext() != null) {
systemUsage.addAll(d.getDiagnosticsContext().getSystemUsage());
}
}
return systemUsage;
}
/**
 * Returns the number of retries and/or attempts for speculative processing.
 * @return the number of retries and/or attempts for speculative processing.
 */
public int getRetryCount() {
    if (this.diagnostics == null) {
        return 0;
    }

    int accumulated = 0;
    for (ClientSideRequestStatistics stats : this.getDistinctCombinedClientSideRequestStatistics()) {
        accumulated += getRetryCount(stats);
    }

    return Math.max(0, accumulated);
}

// Retry count of a single request-statistics record; 0 when no retry context is available.
private int getRetryCount(ClientSideRequestStatistics stats) {
    if (stats == null) {
        return 0;
    }
    if (stats.getRetryContext() == null) {
        return 0;
    }
    return stats.getRetryContext().getRetryCount();
}
// Accumulators below are guarded by the spanName monitor (the context-wide lock).
// Folds the charge of one request/attempt into the operation total.
void addRequestCharge(float requestCharge) {
synchronized (this.spanName) {
this.totalRequestCharge += requestCharge;
}
}
// Tracks the largest request payload observed for this operation.
void addRequestSize(int bytes) {
synchronized (this.spanName) {
this.maxRequestSize = Math.max(this.maxRequestSize, bytes);
}
}
// Tracks the largest response payload observed for this operation.
void addResponseSize(int bytes) {
synchronized (this.spanName) {
this.maxResponseSize = Math.max(this.maxResponseSize, bytes);
}
}
/**
 * The diagnostic records for service interactions within the scope of this SDK operation
 * @return the diagnostic records for service interactions within the scope of this SDK operation
 */
public Collection<CosmosDiagnostics> getDiagnostics() {
// NOTE(review): exposes the live internal deque, not a defensive copy - callers could mutate it.
return this.diagnostics;
}
/**
 * Returns a flag indicating whether the operation has been completed yet.
 * @return a flag indicating whether the operation has been completed yet.
 */
public boolean isCompleted() {
return this.isCompleted.get();
}
/**
 * The total end-to-end duration of the operation.
 * @return the total end-to-end duration of the operation - null until the operation was recorded/ended.
 */
public Duration getDuration() {
return this.duration;
}
/**
 * A flag indicating whether the operation should be considered failed or not based on the status code handling
 * rules in {@link CosmosDiagnosticsThresholds}
 * @return a flag indicating whether the operation should be considered failed or not
 */
public boolean isFailure() {
    // An operation that has not completed yet is never classified as failed.
    return this.isCompleted()
        && this.thresholds.isFailureCondition(this.statusCode, this.subStatusCode);
}
// Marks the begin of the operation; may only be called once - startTime is assigned nowhere else.
void startOperation() {
checkState(
this.startTime == null,
"Method 'startOperation' must not be called multiple times.");
synchronized (this.spanName) {
this.startTime = Instant.now();
this.cachedRequestDiagnostics = null;
}
}
// Completes the operation exactly once (CAS on isCompleted); only the first caller records the
// outcome. Returns true when this invocation performed the completion.
synchronized boolean endOperation(int statusCode, int subStatusCode, Integer actualItemCount, Throwable finalError) {
synchronized (this.spanName) {
boolean hasCompletedOperation = this.isCompleted.compareAndSet(false, true);
if (hasCompletedOperation) {
this.recordOperation(statusCode, subStatusCode, actualItemCount, finalError);
}
return hasCompletedOperation;
}
}
// Records (or re-records) the operation outcome; duration is measured from startOperation until now.
synchronized void recordOperation(int statusCode, int subStatusCode, Integer actualItemCount, Throwable finalError) {
synchronized (this.spanName) {
this.statusCode = statusCode;
this.subStatusCode = subStatusCode;
this.finalError = finalError;
if (actualItemCount != null) {
// First recorded count replaces the -1 sentinel; subsequent counts are added on top.
if (!this.actualItemCount.compareAndSet(-1, actualItemCount)) {
this.actualItemCount.addAndGet(actualItemCount);
}
}
this.duration = Duration.between(this.startTime, Instant.now());
this.cachedRequestDiagnostics = null;
}
}
// Captures the effective sampling rate and propagates it to all diagnostics recorded so far;
// addDiagnostics() applies it to entries added afterwards.
synchronized void setSamplingRateSnapshot(double samplingRate) {
this.samplingRateSnapshot = samplingRate;
for (CosmosDiagnostics d : this.diagnostics) {
diagAccessor.setSamplingRateSnapshot(d, samplingRate);
}
}
// Serializes this context - including all captured diagnostics - into the json string surfaced
// by toJson(). Optional fields are omitted when empty/unset. Falls back to a minimal
// {"exception": ...} document when serialization fails.
String getRequestDiagnostics() {
ObjectNode ctxNode = mapper.createObjectNode();
ctxNode.put("spanName", this.spanName);
ctxNode.put("account", this.accountName);
ctxNode.put("db", this.databaseName);
if (!this.collectionName.isEmpty()) {
ctxNode.put("container", this.collectionName);
}
ctxNode.put("resource", this.resourceType.toString());
ctxNode.put("operation", this.operationType.toString());
if (!this.operationId.isEmpty()) {
ctxNode.put("operationId", this.operationId);
}
if (this.trackingId != null && !this.trackingId.isEmpty()) {
ctxNode.put("trackingId", this.trackingId);
}
ctxNode.put("consistency", this.consistencyLevel.toString());
ctxNode.put("status", this.statusCode);
// 0 is treated as "no sub-status" and omitted.
if (this.subStatusCode != 0) {
ctxNode.put("subStatus", this.subStatusCode);
}
ctxNode.put("RUs", this.totalRequestCharge);
ctxNode.put("maxRequestSizeInBytes", this.maxRequestSize);
ctxNode.put("maxResponseSizeInBytes", this.maxResponseSize);
if (this.maxItemCount != null) {
ctxNode.put("maxItems", this.maxItemCount);
}
// Negative actualItemCount is the "never recorded" sentinel and omitted.
if (this.actualItemCount.get() >= 0) {
ctxNode.put("actualItems", this.actualItemCount.get());
}
if (this.finalError != null) {
ctxNode.put("exception", this.finalError.toString());
}
if (this.diagnostics != null && this.diagnostics.size() > 0) {
ArrayNode diagnosticsNode = ctxNode.putArray("diagnostics");
for (CosmosDiagnostics d: this.diagnostics) {
ObjectNode childNode = mapper.createObjectNode();
d.fillCosmosDiagnostics(childNode, null);
diagnosticsNode.add(childNode);
}
}
try {
return mapper.writeValueAsString(ctxNode);
} catch (JsonProcessingException e) {
// Best-effort fallback: emit only the serialization failure itself.
ctxNode = mapper.createObjectNode();
ctxNode.put("exception", e.toString());
try {
return mapper.writeValueAsString(ctxNode);
} catch (JsonProcessingException ex) {
throw new RuntimeException(ex);
}
}
}
/**
 * Returns a json-string representation of the diagnostics context. This string uses json format for readability,
 * but it should be treated as an opaque string - the format can and will change between SDK versions - for any
 * automatic processing of the diagnostics information the get-properties of public API should be used.
 * @return a json-string representation of the diagnostics context. This string uses json format for readability,
 * but it should be treated as an opaque string - the format can and will change between SDK versions -
 * for any
 * automatic processing of the diagnostics information the get-properties of public API should be used.
 */
public String toJson() {
// Double-checked caching: cachedRequestDiagnostics is invalidated whenever state changes
// (addDiagnostics/recordOperation); the spanName monitor is the context-wide lock.
String snapshot = this.cachedRequestDiagnostics;
if (snapshot != null) {
return snapshot;
}
synchronized (this.spanName) {
snapshot = this.cachedRequestDiagnostics;
if (snapshot != null) {
return snapshot;
}
return this.cachedRequestDiagnostics = getRequestDiagnostics();
}
}
/**
 * Gets the UserAgent header value used by the client issuing this operation.
 * NOTE: this information is not included in the json representation returned from {@link #toJson()} -
 * it can be useful when writing a custom {@link CosmosDiagnosticsHandler}.
 * @return the UserAgent header value used for the client that issued this operation
 */
public String getUserAgent() {
    // Every diagnostics entry of this operation comes from the same client - use the first one.
    CosmosDiagnostics first = this.diagnostics != null ? this.diagnostics.peekFirst() : null;
    return first != null ? first.getUserAgent() : "";
}
/**
 * Appends a {@link CosmosDiagnosticsRequestInfo} record built from the gateway statistics of the
 * given request statistics. No-op when no gateway statistics were captured.
 * (The previous javadoc here - "Returns the set of contacted regions" - was a copy-paste from
 * getContactedRegionNames and did not describe this method.)
 */
private static void addRequestInfoForGatewayStatistics(
ClientSideRequestStatistics requestStats,
ArrayList<CosmosDiagnosticsRequestInfo> requestInfo,
ClientSideRequestStatistics.GatewayStatistics gatewayStats) {
if (gatewayStats == null) {
return;
}
CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
requestStats.getActivityId(),
null,
gatewayStats.getPartitionKeyRangeId(),
gatewayStats.getResourceType() + ":" + gatewayStats.getOperationType(),
requestStats.getRequestStartTimeUTC(),
requestStats.getDuration(),
null,
gatewayStats.getRequestCharge(),
gatewayStats.getResponsePayloadSizeInBytes(),
gatewayStats.getStatusCode(),
gatewayStats.getSubStatusCode(),
new ArrayList<>()
);
requestInfo.add(info);
}
/**
 * Appends one {@link CosmosDiagnosticsRequestInfo} record per store response captured in the
 * given request statistics. Entries without store-result or store-response diagnostics are skipped.
 *
 * @param requestStats the request statistics the responses belong to
 * @param requestInfo the target list the records are appended to
 * @param storeResponses the store response statistics to convert
 */
private static void addRequestInfoForStoreResponses(
    ClientSideRequestStatistics requestStats,
    ArrayList<CosmosDiagnosticsRequestInfo> requestInfo,
    List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponses) {

    for (ClientSideRequestStatistics.StoreResponseStatistics responseStats: storeResponses) {
        StoreResultDiagnostics resultDiagnostics = responseStats.getStoreResult();
        if (resultDiagnostics == null) {
            continue;
        }

        StoreResponseDiagnostics responseDiagnostics = resultDiagnostics.getStoreResponseDiagnostics();
        // BUGFIX: responseDiagnostics was previously only null-checked for the event timeline
        // below but dereferenced unconditionally afterwards (partition key range id, request
        // charge, status codes) - which would throw a NullPointerException. Skip such entries.
        if (responseDiagnostics == null) {
            continue;
        }

        String partitionId = null;
        String[] partitionAndReplicaId = resultDiagnostics.getPartitionAndReplicaId();
        if (partitionAndReplicaId != null) {
            partitionId = partitionAndReplicaId[0];
        }

        // Convert the transport-level request timeline into diagnostics events.
        Collection<CosmosDiagnosticsRequestEvent> events = new ArrayList<>();
        RequestTimeline timeline = responseDiagnostics.getRequestTimeline();
        timeline.forEach(e ->
            events.add(new CosmosDiagnosticsRequestEvent(e.getStartTime(), e.getDuration(), e.getName()))
        );

        // Backend latency is reported in (fractional) milliseconds - convert to Duration.
        Duration backendLatency = null;
        if (resultDiagnostics.getBackendLatencyInMs() != null) {
            backendLatency = Duration.ofNanos((long)(1000000 * resultDiagnostics.getBackendLatencyInMs()));
        }

        CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
            requestStats.getActivityId(),
            partitionId,
            responseDiagnostics.getPartitionKeyRangeId(),
            responseStats.getRequestResourceType() + ":" + responseStats.getRequestOperationType(),
            requestStats.getRequestStartTimeUTC(),
            requestStats.getDuration(),
            backendLatency,
            responseDiagnostics.getRequestCharge(),
            responseDiagnostics.getResponsePayloadLength(),
            responseDiagnostics.getStatusCode(),
            responseDiagnostics.getSubStatusCode(),
            events
        );
        requestInfo.add(info);
    }
}
/**
 * Appends one {@link CosmosDiagnosticsRequestInfo} record per completed address-resolution call.
 * In-flight resolutions (or ones without an end time) are skipped.
 *
 * @param requestStats the request statistics the resolutions belong to
 * @param requestInfo the target list the records are appended to
 * @param addressResolutionStatisticsMap the captured address-resolution statistics (may be null/empty)
 */
private void addRequestInfoForAddressResolution(
    ClientSideRequestStatistics requestStats,
    ArrayList<CosmosDiagnosticsRequestInfo> requestInfo,
    Map<String, ClientSideRequestStatistics.AddressResolutionStatistics> addressResolutionStatisticsMap
) {
    if (addressResolutionStatisticsMap == null || addressResolutionStatisticsMap.isEmpty()) {
        return;
    }

    for (ClientSideRequestStatistics.AddressResolutionStatistics addressResolutionStatistics
            : addressResolutionStatisticsMap.values()) {

        if (addressResolutionStatistics.isInflightRequest() ||
            addressResolutionStatistics.getEndTimeUTC() == null) {
            // Ignore in-flight or incomplete address resolutions.
            continue;
        }

        Duration latency = Duration.between(
            addressResolutionStatistics.getStartTimeUTC(),
            addressResolutionStatistics.getEndTimeUTC());

        // Activity name format: AddressResolution|<endpoint>|<forceRefresh 0/1>|<forceCollectionRoutingMapRefresh 0/1>
        String activityName = "AddressResolution|"
            + addressResolutionStatistics.getTargetEndpoint()
            + "|"
            + (addressResolutionStatistics.isForceRefresh() ? "1|" : "0|")
            + (addressResolutionStatistics.isForceCollectionRoutingMapRefresh() ? "1" : "0");

        CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
            requestStats.getActivityId(),
            null,
            null,
            activityName,
            addressResolutionStatistics.getStartTimeUTC(),
            latency,
            null,
            0,
            0,
            0,
            0,
            new ArrayList<>()   // FIX: was a raw 'new ArrayList()' - use the generic diamond form
        );
        requestInfo.add(info);
    }
}
/**
 * Gets a collection of {@link CosmosDiagnosticsRequestInfo} records providing more information about
 * individual requests issued in the transport layer to process this operation.
 * NOTE: this information is not included in the json representation returned from {@link #toJson()} - it
 * is usually only relevant when thresholds are violated, in which case the entire diagnostics json-string is
 * included. Calling this method will lazily collect the request info - which can be useful when writing
 * a custom {@link CosmosDiagnosticsHandler}
 * @return a collection of {@link CosmosDiagnosticsRequestInfo} records providing more information about
 * individual requests issued in the transport layer to process this operation.
 */
public Collection<CosmosDiagnosticsRequestInfo> getRequestInfo() {
// Double-checked caching: requestInfo is invalidated in addDiagnostics() whenever a new
// diagnostics entry arrives; the spanName monitor is the context-wide lock.
ArrayList<CosmosDiagnosticsRequestInfo> snapshot = this.requestInfo;
if (snapshot != null) {
return snapshot;
}
synchronized (this.spanName) {
if (this.requestInfo != null) {
return this.requestInfo;
}
// Rebuild from every distinct request-statistics record captured for this operation.
snapshot = new ArrayList<>();
for (ClientSideRequestStatistics requestStats: this.getDistinctCombinedClientSideRequestStatistics()) {
addRequestInfoForStoreResponses(
requestStats,
snapshot,
requestStats.getResponseStatisticsList());
addRequestInfoForStoreResponses(
requestStats,
snapshot,
requestStats.getSupplementalResponseStatisticsList());
addRequestInfoForGatewayStatistics(requestStats, snapshot, requestStats.getGatewayStatistics());
addRequestInfoForAddressResolution(
requestStats,
snapshot,
requestStats.getAddressResolutionStatistics());
}
this.requestInfo = snapshot;
return snapshot;
}
}
/**
 * Registers the {@link ImplementationBridgeHelpers} accessor that lets implementation packages
 * create and mutate {@link CosmosDiagnosticsContext} instances without widening the public API.
 */
static void initialize() {
    ImplementationBridgeHelpers
        .CosmosDiagnosticsContextHelper
        .setCosmosDiagnosticsContextAccessor(
            new ImplementationBridgeHelpers
                .CosmosDiagnosticsContextHelper
                .CosmosDiagnosticsContextAccessor() {

                @Override
                public CosmosDiagnosticsContext create(String spanName, String account, String endpoint,
                                                       String databaseId, String containerId,
                                                       ResourceType resourceType, OperationType operationType,
                                                       String operationId,
                                                       ConsistencyLevel consistencyLevel, Integer maxItemCount,
                                                       CosmosDiagnosticsThresholds thresholds, String trackingId) {
                    return new CosmosDiagnosticsContext(
                        spanName,
                        account,
                        endpoint,
                        databaseId,
                        containerId,
                        resourceType,
                        operationType,
                        operationId,
                        consistencyLevel,
                        maxItemCount,
                        thresholds,
                        trackingId);
                }

                @Override
                public void startOperation(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.startOperation();
                }

                @Override
                public void recordOperation(CosmosDiagnosticsContext ctx, int statusCode, int subStatusCode,
                                            Integer actualItemCount, Double requestCharge,
                                            CosmosDiagnostics diagnostics, Throwable finalError) {
                    validateAndRecordOperationResult(ctx, requestCharge, diagnostics);
                    ctx.recordOperation(statusCode, subStatusCode, actualItemCount, finalError);
                }

                // Shared pre-processing for recordOperation/endOperation: attach the diagnostics
                // entry (if any) and fold the request charge into the context aggregates.
                private void validateAndRecordOperationResult(
                    CosmosDiagnosticsContext ctx,
                    Double requestCharge,
                    CosmosDiagnostics diagnostics) {

                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    if (diagnostics != null) {
                        ctx.addDiagnostics(diagnostics);
                    }

                    if (requestCharge != null) {
                        ctx.addRequestCharge(requestCharge.floatValue());
                    }
                }

                @Override
                public boolean endOperation(CosmosDiagnosticsContext ctx, int statusCode, int subStatusCode,
                                            Integer actualItemCount, Double requestCharge,
                                            CosmosDiagnostics diagnostics, Throwable finalError) {
                    validateAndRecordOperationResult(ctx, requestCharge, diagnostics);
                    return ctx.endOperation(statusCode, subStatusCode, actualItemCount, finalError);
                }

                @Override
                public void addRequestCharge(CosmosDiagnosticsContext ctx, float requestCharge) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.addRequestCharge(requestCharge);
                }

                @Override
                public void addRequestSize(CosmosDiagnosticsContext ctx, int bytes) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.addRequestSize(bytes);
                }

                @Override
                public void addResponseSize(CosmosDiagnosticsContext ctx, int bytes) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.addResponseSize(bytes);
                }

                @Override
                public void addDiagnostics(CosmosDiagnosticsContext ctx, CosmosDiagnostics diagnostics) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    // BUGFIX: this previously validated 'ctx' a second time instead of
                    // 'diagnostics', so a null 'diagnostics' slipped past the precondition
                    // and only failed later inside ctx.addDiagnostics.
                    checkNotNull(diagnostics, "Argument 'diagnostics' must not be null.");
                    ctx.addDiagnostics(diagnostics);
                }

                @Override
                public Collection<CosmosDiagnostics> getDiagnostics(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getDiagnostics();
                }

                @Override
                public ResourceType getResourceType(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getResourceTypeInternal();
                }

                @Override
                public OperationType getOperationType(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getOperationTypeInternal();
                }

                @Override
                public String getEndpoint(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getEndpoint();
                }

                @Override
                public Collection<ClientSideRequestStatistics> getDistinctCombinedClientSideRequestStatistics(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getDistinctCombinedClientSideRequestStatistics();
                }

                @Override
                public String getSpanName(CosmosDiagnosticsContext ctx) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    return ctx.getSpanName();
                }

                @Override
                public void setSamplingRateSnapshot(CosmosDiagnosticsContext ctx, double samplingRate) {
                    checkNotNull(ctx, "Argument 'ctx' must not be null.");
                    ctx.setSamplingRateSnapshot(samplingRate);
                }
            });
}
} | class CosmosDiagnosticsContext {
// Process-wide singletons shared by all diagnostics contexts.
private final static ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor diagAccessor =
ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor();
private final static ObjectMapper mapper = Utils.getSimpleObjectMapper();
// Immutable operation identity, fixed at construction time. NOTE: 'spanName' also serves as the
// monitor object guarding the mutable state below (see the synchronized (this.spanName) blocks).
private final String spanName;
private final String accountName;
private final String endpoint;
private final String databaseName;
private final String collectionName;
private final ResourceType resourceType;
private final String resourceTypeString;
private final OperationType operationType;
private final String operationTypeString;
private final ConsistencyLevel consistencyLevel;
private final ConcurrentLinkedDeque<CosmosDiagnostics> diagnostics;
private final Integer maxItemCount;
private final CosmosDiagnosticsThresholds thresholds;
private final String operationId;
private final String trackingId;
private final String connectionMode;
private final String userAgent;
// Mutable outcome/aggregate state; written under synchronized (this.spanName).
private Throwable finalError;
private Instant startTime = null;
private Duration duration = null;
private int statusCode = 0;
private int subStatusCode = 0;
private final AtomicInteger actualItemCount = new AtomicInteger(-1); // -1 == "never recorded" sentinel
private float totalRequestCharge = 0;
private int maxRequestSize = 0;
private int maxResponseSize = 0;
private String cachedRequestDiagnostics = null; // lazily built json, invalidated on state changes
private final AtomicBoolean isCompleted = new AtomicBoolean(false);
private CosmosDiagnosticsSystemUsageSnapshot systemUsage; // lazily populated in getSystemUsage()
private Double samplingRateSnapshot;
private ArrayList<CosmosDiagnosticsRequestInfo> requestInfo = null; // lazily built, invalidated in addDiagnostics()
// Package-private constructor - contexts are created by the SDK, not by application code.
// Required arguments are validated eagerly; databaseName/collectionName/operationId are
// normalized to "" when null; trackingId may stay null.
CosmosDiagnosticsContext(
String spanName,
String accountName,
String endpoint,
String databaseName,
String collectionName,
ResourceType resourceType,
OperationType operationType,
String operationId,
ConsistencyLevel consistencyLevel,
Integer maxItemCount,
CosmosDiagnosticsThresholds thresholds,
String trackingId,
String connectionMode,
String userAgent) {
checkNotNull(spanName, "Argument 'spanName' must not be null.");
checkNotNull(accountName, "Argument 'accountName' must not be null.");
checkNotNull(endpoint, "Argument 'endpoint' must not be null.");
checkNotNull(resourceType, "Argument 'resourceType' must not be null.");
checkNotNull(operationType, "Argument 'operationType' must not be null.");
checkNotNull(consistencyLevel, "Argument 'consistencyLevel' must not be null.");
checkNotNull(thresholds, "Argument 'thresholds' must not be null.");
checkNotNull(connectionMode, "Argument 'connectionMode' must not be null.");
checkNotNull(userAgent, "Argument 'userAgent' must not be null.");
this.spanName = spanName;
this.accountName = accountName;
this.endpoint = endpoint;
this.databaseName = databaseName != null ? databaseName : "";
this.collectionName = collectionName != null ? collectionName : "";
this.resourceType = resourceType;
// Cache the string forms - they are returned from the public getters.
this.resourceTypeString = resourceType.toString();
this.operationType = operationType;
this.operationTypeString = operationType.toString();
this.operationId = operationId != null ? operationId : "";
this.diagnostics = new ConcurrentLinkedDeque<>();
this.consistencyLevel = consistencyLevel;
this.maxItemCount = maxItemCount;
this.thresholds = thresholds;
this.trackingId = trackingId;
this.userAgent = userAgent;
this.connectionMode = connectionMode;
}
/**
 * The name of the account related to the operation
 * @return the name of the account related to the operation
 */
public String getAccountName() {
return this.accountName;
}
// Package-private: used by the ImplementationBridgeHelpers accessor (see initialize()).
String getEndpoint() { return this.endpoint; }
/**
 * The name of the database related to the operation
 * @return the name of the database related to the operation
 */
public String getDatabaseName() {
return this.databaseName;
}
/**
 * The name of the container related to the operation
 * @return the name of the container related to the operation
 */
public String getContainerName() {
return this.collectionName;
}
/**
 * The resource type of the operation
 * @return the resource type of the operation
 */
public String getResourceType() {
return this.resourceTypeString;
}
// Package-private: typed variant for the ImplementationBridgeHelpers accessor.
ResourceType getResourceTypeInternal() {
return this.resourceType;
}
/**
 * The operation type of the operation
 * @return the operation type of the operation
 */
public String getOperationType() {
return this.operationTypeString;
}
/**
 * The trackingId of a write operation. Will be null for read-/query- or feed operations or when non-idempotent
 * writes are disabled for writes or only enabled without trackingId propagation.
 * @return the trackingId of an operation
 */
public String getTrackingId() {
return this.trackingId;
}
/**
 * A flag indicating whether the operation is a point operation or not.
 * @return a flag indicating whether the operation is a point operation or not.
 */
public boolean isPointOperation() {
return this.operationType.isPointOperation();
}
// Package-private: typed variant for the ImplementationBridgeHelpers accessor.
OperationType getOperationTypeInternal() {
return this.operationType;
}
/**
 * The operation identifier of the operation - this can be used to
 * add a dimension for feed operations - like queries -
 * so, metrics and diagnostics can be separated for different query types etc.
 * @return the operation identifier of the operation
 */
public String getOperationId() {
return this.operationId;
}
/**
 * The effective consistency level of the operation
 * @return the effective consistency level of the operation
 */
public ConsistencyLevel getEffectiveConsistencyLevel() {
return this.consistencyLevel;
}
/**
 * The max. number of items requested in a feed operation
 * @return the max. number of items requested in a feed operation. Will be null for point operations.
 */
public Integer getMaxItemCount() {
return this.maxItemCount;
}
/**
 * The actual number of items returned by a feed operation
 * @return the actual number of items returned by a feed operation. Will be null for point operations.
 */
public Integer getActualItemCount() {
int snapshot = this.actualItemCount.get();
// A negative snapshot is the "never recorded" sentinel (see recordOperation) - surface it as null.
if (snapshot < 0) {
return null;
}
return snapshot;
}
/**
 * The span name as a logical identifier for an operation
 * @return the span name as a logical identifier for an operation
 */
String getSpanName() {
return this.spanName;
}
/**
 * Indicates whether the latency, request charge or payload size of the operation exceeded the given threshold
 * @return a flag indicating whether the latency, request charge or payload size of the operation
 * exceeded its threshold.
 */
public boolean isThresholdViolated() {
    // Thresholds can only be evaluated once the operation finished.
    if (!this.isCompleted()) {
        return false;
    }

    if (this.thresholds.isFailureCondition(this.statusCode, this.subStatusCode)) {
        return true;
    }

    // Point and feed operations have independent latency thresholds.
    Duration latencyThreshold = this.operationType.isPointOperation()
        ? this.thresholds.getPointOperationLatencyThreshold()
        : this.thresholds.getNonPointOperationLatencyThreshold();
    if (latencyThreshold.compareTo(this.duration) < 0) {
        return true;
    }

    if (this.thresholds.getRequestChargeThreshold() < this.totalRequestCharge) {
        return true;
    }

    int largestPayload = Math.max(this.maxRequestSize, this.maxResponseSize);
    return this.thresholds.getPayloadSizeThreshold() < largestPayload;
}
// Records one service-interaction diagnostics entry for this operation and folds its payload
// sizes into the context-wide aggregates. Guarded by the spanName monitor (the context lock).
void addDiagnostics(CosmosDiagnostics cosmosDiagnostics) {
checkNotNull(cosmosDiagnostics, "Argument 'cosmosDiagnostics' must not be null.");
synchronized (this.spanName) {
// Backfill the sampling rate already captured for this context onto the new entry.
if (this.samplingRateSnapshot != null) {
diagAccessor.setSamplingRateSnapshot(cosmosDiagnostics, this.samplingRateSnapshot);
}
this.addRequestSize(diagAccessor.getRequestPayloadSizeInBytes(cosmosDiagnostics));
this.addResponseSize(diagAccessor.getTotalResponsePayloadSizeInBytes(cosmosDiagnostics));
this.diagnostics.add(cosmosDiagnostics);
// Invalidate lazily-built snapshots (json string and request-info list).
this.cachedRequestDiagnostics = null;
this.requestInfo = null;
// Back-reference so the diagnostics entry can navigate to its owning context.
cosmosDiagnostics.setDiagnosticsContext(this);
}
}
// Merges client-side request statistics from all recorded diagnostics (including any
// feed-response diagnostics) into a single de-duplicated collection.
Collection<ClientSideRequestStatistics> getDistinctCombinedClientSideRequestStatistics() {
    DistinctClientSideRequestStatisticsCollection merged =
        new DistinctClientSideRequestStatisticsCollection();

    for (CosmosDiagnostics current : this.getDiagnostics()) {
        merged.addAll(current.getClientSideRequestStatistics());

        FeedResponseDiagnostics feedDiagnostics = current.getFeedResponseDiagnostics();
        if (feedDiagnostics != null) {
            merged.addAll(feedDiagnostics.getClientSideRequestStatistics());
        }
    }

    return merged;
}
/**
 * The final status code of the operation (possibly after retries)
 * @return the final status code of the operation (possibly after retries)
 */
public int getStatusCode() {
return this.statusCode;
}
/**
 * The final sub-status code of the operation (possibly after retries)
 * @return the final sub-status code of the operation (possibly after retries)
 */
public int getSubStatusCode() {
return this.subStatusCode;
}
/**
 * The final error when the operation failed
 * @return the final error when the operation failed - may be {@code null} when no error was recorded
 */
public Throwable getFinalError() {
return this.finalError;
}
/**
 * The max. request payload size in bytes
 * @return the max. request payload size in bytes
 */
public int getMaxRequestPayloadSizeInBytes() {
return this.maxRequestSize;
}
/**
 * The max. response payload size in bytes.
 * @return the max. response payload size in bytes
 */
public int getMaxResponsePayloadSizeInBytes() {
return this.maxResponseSize;
}
/**
 * The total request charge across all retries.
 * @return the total request charge across all retries.
 */
public float getTotalRequestCharge() {
return this.totalRequestCharge;
}
/**
 * Returns the set of contacted regions
 * @return the set of contacted regions (empty when no diagnostics were recorded)
 */
public Set<String> getContactedRegionNames() {
    TreeSet<String> mergedRegions = new TreeSet<>();
    if (this.diagnostics != null) {
        for (CosmosDiagnostics current : this.diagnostics) {
            mergedRegions.addAll(current.getContactedRegionNames());
        }
    }
    return mergedRegions;
}
/**
 * Returns the system usage
 * NOTE: this information is not included in the json representation returned from {@link #toJson()} - it
 * is usually only relevant when thresholds are violated, in which case the entire diagnostics json-string is
 * included. Calling this method will lazily collect the system usage - which can be useful when writing
 * a custom {@link CosmosDiagnosticsHandler}
 * @return the system usage
 */
public CosmosDiagnosticsSystemUsageSnapshot getSystemUsage() {
synchronized (this.spanName) {
// Lazily fetch and cache the snapshot under the context-wide lock; subsequent calls
// return the cached value.
CosmosDiagnosticsSystemUsageSnapshot snapshot = this.systemUsage;
if (snapshot != null) {
return snapshot;
}
return this.systemUsage = ClientSideRequestStatistics.fetchSystemInformation();
}
}
/**
 * Returns the number of retries and/or attempts for speculative processing.
 * @return the number of retries and/or attempts for speculative processing.
 */
public int getRetryCount() {
    if (this.diagnostics == null) {
        return 0;
    }

    int accumulated = 0;
    for (ClientSideRequestStatistics stats : this.getDistinctCombinedClientSideRequestStatistics()) {
        accumulated += getRetryCount(stats);
    }

    return Math.max(0, accumulated);
}

// Retry count of a single request-statistics record; 0 when no retry context is available.
private int getRetryCount(ClientSideRequestStatistics stats) {
    if (stats == null) {
        return 0;
    }
    if (stats.getRetryContext() == null) {
        return 0;
    }
    return stats.getRetryContext().getRetryCount();
}
// Accumulators below are guarded by the spanName monitor (the context-wide lock).
// Folds the charge of one request/attempt into the operation total.
void addRequestCharge(float requestCharge) {
synchronized (this.spanName) {
this.totalRequestCharge += requestCharge;
}
}
// Tracks the largest request payload observed for this operation.
void addRequestSize(int bytes) {
synchronized (this.spanName) {
this.maxRequestSize = Math.max(this.maxRequestSize, bytes);
}
}
// Tracks the largest response payload observed for this operation.
void addResponseSize(int bytes) {
synchronized (this.spanName) {
this.maxResponseSize = Math.max(this.maxResponseSize, bytes);
}
}
/**
* The diagnostic records for service interactions within the scope of this SDK operation
* @return the diagnostic records for service interactions within the scope of this SDK operation
*/
public Collection<CosmosDiagnostics> getDiagnostics() {
return this.diagnostics;
}
/**
 * Returns a flag indicating whether the operation has been completed yet.
 * Thread-safe - backed by an atomic boolean that is flipped exactly once in endOperation.
 * @return a flag indicating whether the operation has been completed yet.
 */
public boolean isCompleted() {
    return this.isCompleted.get();
}
/**
 * The total end-to-end duration of the operation.
 * NOTE(review): only assigned in recordOperation - presumably {@code null} until the
 * operation completes; confirm that callers handle the null case.
 * @return the total end-to-end duration of the operation.
 */
public Duration getDuration() {
    return this.duration;
}
/**
 * A flag indicating whether the operation should be considered failed or not based on the
 * status-code handling rules in {@link CosmosDiagnosticsThresholds}.
 * An operation that has not completed yet is never considered a failure.
 * @return a flag indicating whether the operation should be considered failed or not
 */
public boolean isFailure() {
    return this.isCompleted()
        && this.thresholds.isFailureCondition(this.statusCode, this.subStatusCode);
}
// Marks the start of the operation; must be called exactly once per context instance.
// NOTE(review): the startTime null-check runs outside the lock - presumably safe because
// the start of an operation is single-threaded; confirm.
void startOperation() {
    checkState(
        this.startTime == null,
        "Method 'startOperation' must not be called multiple times.");
    synchronized (this.spanName) {
        this.startTime = Instant.now();
        // Any previously rendered json representation is stale now.
        this.cachedRequestDiagnostics = null;
    }
}
// Completes the operation at most once; returns true only for the call that actually
// transitioned the context into the completed state.
// NOTE(review): the method is declared synchronized AND synchronizes on spanName - the
// method-level monitor looks redundant; confirm before removing either lock.
synchronized boolean endOperation(int statusCode, int subStatusCode, Integer actualItemCount, Throwable finalError) {
    synchronized (this.spanName) {
        boolean hasCompletedOperation = this.isCompleted.compareAndSet(false, true);
        if (hasCompletedOperation) {
            this.recordOperation(statusCode, subStatusCode, actualItemCount, finalError);
        }
        return hasCompletedOperation;
    }
}
// Records the final outcome of the operation. The first reported item count replaces the
// -1 'not set' sentinel; any subsequent reports are accumulated on top of it.
synchronized void recordOperation(int statusCode, int subStatusCode, Integer actualItemCount, Throwable finalError) {
    synchronized (this.spanName) {
        this.statusCode = statusCode;
        this.subStatusCode = subStatusCode;
        this.finalError = finalError;
        if (actualItemCount != null) {
            if (!this.actualItemCount.compareAndSet(-1, actualItemCount)) {
                this.actualItemCount.addAndGet(actualItemCount);
            }
        }
        // End-to-end latency measured from startOperation until now.
        this.duration = Duration.between(this.startTime, Instant.now());
        // Invalidate the cached json representation - the outcome just changed.
        this.cachedRequestDiagnostics = null;
    }
}
// Stores the effective sampling rate and propagates it into every child diagnostics instance.
synchronized void setSamplingRateSnapshot(double samplingRate) {
    this.samplingRateSnapshot = samplingRate;
    for (CosmosDiagnostics d : this.diagnostics) {
        diagAccessor.setSamplingRateSnapshot(d, samplingRate);
    }
}
// Renders this context - including all child diagnostics - into a json string.
// Optional properties (container, operationId, trackingId, subStatus, ...) are only
// emitted when they carry a meaningful value, to keep the payload small.
String getRequestDiagnostics() {
    ObjectNode ctxNode = mapper.createObjectNode();
    ctxNode.put("spanName", this.spanName);
    ctxNode.put("account", this.accountName);
    ctxNode.put("db", this.databaseName);
    if (!this.collectionName.isEmpty()) {
        ctxNode.put("container", this.collectionName);
    }
    ctxNode.put("resource", this.resourceType.toString());
    ctxNode.put("operation", this.operationType.toString());
    if (!this.operationId.isEmpty()) {
        ctxNode.put("operationId", this.operationId);
    }
    if (this.trackingId != null && !this.trackingId.isEmpty()) {
        ctxNode.put("trackingId", this.trackingId);
    }
    ctxNode.put("consistency", this.consistencyLevel.toString());
    ctxNode.put("status", this.statusCode);
    if (this.subStatusCode != 0) { // 0 == no sub-status
        ctxNode.put("subStatus", this.subStatusCode);
    }
    ctxNode.put("RUs", this.totalRequestCharge);
    ctxNode.put("maxRequestSizeInBytes", this.maxRequestSize);
    ctxNode.put("maxResponseSizeInBytes", this.maxResponseSize);
    if (this.maxItemCount != null) {
        ctxNode.put("maxItems", this.maxItemCount);
    }
    if (this.actualItemCount.get() >= 0) { // -1 is the 'not set' sentinel
        ctxNode.put("actualItems", this.actualItemCount.get());
    }
    if (this.finalError != null) {
        ctxNode.put("exception", this.finalError.toString());
    }
    if (this.diagnostics != null && this.diagnostics.size() > 0) {
        // Append every child diagnostics instance as a nested json object.
        ArrayNode diagnosticsNode = ctxNode.putArray("diagnostics");
        for (CosmosDiagnostics d: this.diagnostics) {
            ObjectNode childNode = mapper.createObjectNode();
            d.fillCosmosDiagnostics(childNode, null);
            diagnosticsNode.add(childNode);
        }
    }
    try {
        return mapper.writeValueAsString(ctxNode);
    } catch (JsonProcessingException e) {
        // Serialization failed - fall back to a minimal json document describing the failure.
        ctxNode = mapper.createObjectNode();
        ctxNode.put("exception", e.toString());
        try {
            return mapper.writeValueAsString(ctxNode);
        } catch (JsonProcessingException ex) {
            // Even the trivial fallback document failed to serialize - give up.
            throw new RuntimeException(ex);
        }
    }
}
/**
* Returns a json-string representation of the diagnostics context. This string uses json format for readability,
* but it should be treated as an opaque string - the format can and will change between SDK versions - for any
* automatic processing of the diagnostics information the get-properties of public API should be used.
* @return a json-string representation of the diagnostics context. This string uses json format for readability,
* but it should be treated as an opaque string - the format can and will change between SDK versions -
* for any
* automatic processing of the diagnostics information the get-properties of public API should be used.
*/
public String toJson() {
    // Fast path: reuse the previously rendered json when available.
    String snapshot = this.cachedRequestDiagnostics;
    if (snapshot != null) {
        return snapshot;
    }
    synchronized (this.spanName) {
        // Double-checked: another thread may have rendered the json while we waited for the lock.
        snapshot = this.cachedRequestDiagnostics;
        if (snapshot != null) {
            return snapshot;
        }
        // Refresh the system usage snapshot so the rendered json reflects current host load.
        this.systemUsage = ClientSideRequestStatistics.fetchSystemInformation();
        return this.cachedRequestDiagnostics = getRequestDiagnostics();
    }
}
/**
 * Gets the UserAgent header value used by the client issuing this operation.
 * NOTE: this information is not included in the json representation returned from
 * {@link #toJson()} - it is usually only relevant when thresholds are violated, in which
 * case the entire diagnostics json-string is included.
 * @return the UserAgent header value used for the client that issued this operation
 */
public String getUserAgent() {
    return this.userAgent;
}
/**
* Returns the connection mode used in the client.
* NOTE: this information is not included in the json representation returned from {@link
* is usually only relevant when thresholds are violated, in which case the entire diagnostics json-string is
* included.
* @return the connection mode used in the client.
*/
// Translates the gateway statistics of a single request into a
// CosmosDiagnosticsRequestInfo record; no-op when no gateway statistics were captured.
private static void addRequestInfoForGatewayStatistics(
    ClientSideRequestStatistics requestStats,
    List<CosmosDiagnosticsRequestInfo> requestInfo) {
    ClientSideRequestStatistics.GatewayStatistics gatewayStats = requestStats.getGatewayStatistics();
    if (gatewayStats == null) {
        return;
    }
    CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
        requestStats.getActivityId(),
        null,                                   // no partition id available here
        gatewayStats.getPartitionKeyRangeId(),
        gatewayStats.getResourceType() + ":" + gatewayStats.getOperationType(),
        requestStats.getRequestStartTimeUTC(),
        requestStats.getDuration(),
        null,                                   // no backend latency available here
        gatewayStats.getRequestCharge(),
        gatewayStats.getResponsePayloadSizeInBytes(),
        gatewayStats.getStatusCode(),
        gatewayStats.getSubStatusCode(),
        new ArrayList<>()                       // no per-request timeline events captured
    );
    requestInfo.add(info);
}
// Translates the store responses of a single request into CosmosDiagnosticsRequestInfo
// records - one record per store response.
private static void addRequestInfoForStoreResponses(
    ClientSideRequestStatistics requestStats,
    List<CosmosDiagnosticsRequestInfo> requestInfo,
    List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponses) {
    for (ClientSideRequestStatistics.StoreResponseStatistics responseStats: storeResponses) {
        StoreResultDiagnostics resultDiagnostics = responseStats.getStoreResult();
        if (resultDiagnostics == null) {
            continue;
        }
        StoreResponseDiagnostics responseDiagnostics = resultDiagnostics.getStoreResponseDiagnostics();
        // partitionAndReplicaId is expected to hold [partitionId, replicaId] when parseable.
        String partitionId = null;
        String[] partitionAndReplicaId = resultDiagnostics.getPartitionAndReplicaId();
        if (partitionAndReplicaId.length == 2) {
            partitionId = partitionAndReplicaId[0];
        }
        List<CosmosDiagnosticsRequestEvent> events = new ArrayList<>();
        String pkRangeId = "";
        double requestCharge = 0;
        int responsePayloadLength = 0;
        int statusCode = 0;
        int subStatusCode = 0;
        String activityId = requestStats.getActivityId();
        if (responseDiagnostics != null) {
            // Prefer the per-response values over the request-level defaults above.
            activityId = responseDiagnostics.getActivityId();
            requestCharge = responseDiagnostics.getRequestCharge();
            responsePayloadLength = responseDiagnostics.getResponsePayloadLength();
            statusCode = responseDiagnostics.getStatusCode();
            subStatusCode = responseDiagnostics.getSubStatusCode();
            if (responseDiagnostics.getPartitionKeyRangeId() != null) {
                pkRangeId = responseDiagnostics.getPartitionKeyRangeId();
            }
            RequestTimeline timeline = responseDiagnostics.getRequestTimeline();
            // Only keep timeline events that actually happened (have a start and a non-zero duration).
            timeline.forEach( e -> {
                if (e.getStartTime() != null && e.getDuration() != null && !e.getDuration().equals(Duration.ZERO)) {
                    events.add(new CosmosDiagnosticsRequestEvent(e.getStartTime(), e.getDuration(), e.getName()));
                }
            });
        }
        Duration backendLatency = null;
        if (resultDiagnostics.getBackendLatencyInMs() != null) {
            // Convert fractional milliseconds into a Duration with nanosecond resolution.
            backendLatency = Duration.ofNanos((long)(resultDiagnostics.getBackendLatencyInMs() * 1000000d));
        }
        CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
            activityId,
            partitionId,
            pkRangeId,
            responseStats.getRequestResourceType() + ":" + responseStats.getRequestOperationType(),
            requestStats.getRequestStartTimeUTC(),
            responseStats.getDuration(),
            backendLatency,
            requestCharge,
            responsePayloadLength,
            statusCode,
            subStatusCode,
            events
        );
        requestInfo.add(info);
    }
}
/**
 * Translates completed address-resolution calls of a single request into
 * {@link CosmosDiagnosticsRequestInfo} records; in-flight/unfinished resolutions are skipped.
 * Made {@code static} for consistency with the sibling addRequestInfoFor* helpers -
 * it uses no instance state.
 */
private static void addRequestInfoForAddressResolution(
    ClientSideRequestStatistics requestStats,
    List<CosmosDiagnosticsRequestInfo> requestInfo,
    Map<String, ClientSideRequestStatistics.AddressResolutionStatistics> addressResolutionStatisticsMap
) {
    // NOTE: requestStats is currently unused but kept for signature symmetry with the siblings.
    if (addressResolutionStatisticsMap == null || addressResolutionStatisticsMap.isEmpty()) {
        return;
    }
    for (Map.Entry<String, ClientSideRequestStatistics.AddressResolutionStatistics> current
        : addressResolutionStatisticsMap.entrySet()) {
        ClientSideRequestStatistics.AddressResolutionStatistics addressResolutionStatistics = current.getValue();
        String addressResolutionActivityId = current.getKey();
        // Only completed resolutions can be reported - in-flight ones have no end time yet.
        if (addressResolutionStatistics.isInflightRequest() ||
            addressResolutionStatistics.getEndTimeUTC() == null) {
            continue;
        }
        Duration latency = Duration.between(
            addressResolutionStatistics.getStartTimeUTC(),
            addressResolutionStatistics.getEndTimeUTC());
        // Encodes "AddressResolution|<endpoint>|<forceRefresh>|<forceCollectionRoutingMapRefresh>"
        // with the two boolean flags rendered as 1/0.
        String requestType = "AddressResolution|"
            + addressResolutionStatistics.getTargetEndpoint()
            + "|"
            + (addressResolutionStatistics.isForceRefresh() ? "1|" : "0|")
            + (addressResolutionStatistics.isForceCollectionRoutingMapRefresh() ? "1" : "0");
        CosmosDiagnosticsRequestInfo info = new CosmosDiagnosticsRequestInfo(
            addressResolutionActivityId,
            null,
            null,
            requestType,
            addressResolutionStatistics.getStartTimeUTC(),
            latency,
            null,
            0,
            0,
            0,
            0,
            new ArrayList<>()
        );
        requestInfo.add(info);
    }
}
/**
 * Gets a collection of {@link CosmosDiagnosticsRequestInfo} records providing more information about
 * individual requests issued in the transport layer to process this operation.
 * NOTE: this information is not included in the json representation returned from {@link #toJson()} -
 * it is usually only relevant when thresholds are violated, in which case the entire diagnostics
 * json-string is included. Calling this method will lazily collect the request info - which can be
 * useful when writing a custom {@link CosmosDiagnosticsHandler}
 * @return a collection of {@link CosmosDiagnosticsRequestInfo} records providing more information about
 * individual requests issued in the transport layer to process this operation.
 */
public Collection<CosmosDiagnosticsRequestInfo> getRequestInfo() {
    synchronized (this.spanName) {
        // Return the cached list when it was computed before.
        ArrayList<CosmosDiagnosticsRequestInfo> snapshot = this.requestInfo;
        if (snapshot != null) {
            return snapshot;
        }
        snapshot = new ArrayList<>();
        // Aggregate per-request info from every distinct client-side request-statistics record:
        // store responses, supplemental store responses, gateway calls and address resolutions.
        for (ClientSideRequestStatistics requestStats: this.getDistinctCombinedClientSideRequestStatistics()) {
            addRequestInfoForStoreResponses(
                requestStats,
                snapshot,
                requestStats.getResponseStatisticsList());
            addRequestInfoForStoreResponses(
                requestStats,
                snapshot,
                requestStats.getSupplementalResponseStatisticsList());
            addRequestInfoForGatewayStatistics(requestStats, snapshot);
            addRequestInfoForAddressResolution(
                requestStats,
                snapshot,
                requestStats.getAddressResolutionStatistics());
        }
        this.requestInfo = snapshot;
        return snapshot;
    }
}
static void initialize() {
ImplementationBridgeHelpers
.CosmosDiagnosticsContextHelper
.setCosmosDiagnosticsContextAccessor(
new ImplementationBridgeHelpers
.CosmosDiagnosticsContextHelper
.CosmosDiagnosticsContextAccessor() {
@Override
public CosmosDiagnosticsContext create(String spanName, String account, String endpoint,
String databaseId,String containerId,
ResourceType resourceType, OperationType operationType,
String operationId,
ConsistencyLevel consistencyLevel, Integer maxItemCount,
CosmosDiagnosticsThresholds thresholds, String trackingId,
String connectionMode, String userAgent) {
return new CosmosDiagnosticsContext(
spanName,
account,
endpoint,
databaseId,
containerId,
resourceType,
operationType,
operationId,
consistencyLevel,
maxItemCount,
thresholds,
trackingId,
connectionMode,
userAgent);
}
@Override
public CosmosDiagnosticsSystemUsageSnapshot createSystemUsageSnapshot(String cpu, String used, String available, int cpuCount) {
return new CosmosDiagnosticsSystemUsageSnapshot(cpu, used, available, cpuCount);
}
@Override
public void startOperation(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.startOperation();
}
@Override
public void recordOperation(CosmosDiagnosticsContext ctx, int statusCode, int subStatusCode,
Integer actualItemCount, Double requestCharge,
CosmosDiagnostics diagnostics, Throwable finalError) {
validateAndRecordOperationResult(ctx, requestCharge, diagnostics);
ctx.recordOperation(statusCode, subStatusCode, actualItemCount, finalError);
}
private void validateAndRecordOperationResult(
CosmosDiagnosticsContext ctx,
Double requestCharge,
CosmosDiagnostics diagnostics) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
if (diagnostics != null) {
ctx.addDiagnostics(diagnostics);
}
if (requestCharge != null) {
ctx.addRequestCharge(requestCharge.floatValue());
}
}
@Override
public boolean endOperation(CosmosDiagnosticsContext ctx, int statusCode, int subStatusCode,
Integer actualItemCount, Double requestCharge,
CosmosDiagnostics diagnostics, Throwable finalError) {
validateAndRecordOperationResult(ctx, requestCharge, diagnostics);
return ctx.endOperation(statusCode, subStatusCode, actualItemCount, finalError);
}
@Override
public void addRequestCharge(CosmosDiagnosticsContext ctx, float requestCharge) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.addRequestCharge(requestCharge);
}
@Override
public void addRequestSize(CosmosDiagnosticsContext ctx, int bytes) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.addRequestSize(bytes);
}
@Override
public void addResponseSize(CosmosDiagnosticsContext ctx, int bytes) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.addResponseSize(bytes);
}
@Override
public void addDiagnostics(CosmosDiagnosticsContext ctx, CosmosDiagnostics diagnostics) {
    checkNotNull(ctx, "Argument 'ctx' must not be null.");
    // Bug fix: this check previously validated 'ctx' a second time instead of
    // 'diagnostics', so a null 'diagnostics' slipped through despite the message.
    checkNotNull(diagnostics, "Argument 'diagnostics' must not be null.");
    ctx.addDiagnostics(diagnostics);
}
@Override
public Collection<CosmosDiagnostics> getDiagnostics(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getDiagnostics();
}
@Override
public ResourceType getResourceType(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getResourceTypeInternal();
}
@Override
public OperationType getOperationType(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getOperationTypeInternal();
}
@Override
public String getEndpoint(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getEndpoint();
}
@Override
public Collection<ClientSideRequestStatistics> getDistinctCombinedClientSideRequestStatistics(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getDistinctCombinedClientSideRequestStatistics();
}
@Override
public String getSpanName(CosmosDiagnosticsContext ctx) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
return ctx.getSpanName();
}
@Override
public void setSamplingRateSnapshot(CosmosDiagnosticsContext ctx, double samplingRate) {
checkNotNull(ctx, "Argument 'ctx' must not be null.");
ctx.setSamplingRateSnapshot(samplingRate);
}
});
}
} |
This would be simplified if we update the `StartCursor` of the `PulsarPartitionSplit` on snapshot. | protected void initialStartPosition(PulsarPartitionSplit split, Consumer<byte[]> consumer) {
StartCursor startCursor = split.getStartCursor();
TopicPartition partition = split.getPartition();
CursorPosition position =
Optional.ofNullable(split.getLatestConsumedId())
.map(CursorPosition::new)
.orElseGet(
() -> {
return startCursor.position(split);
});
if (position.getType() == CursorPosition.Type.MESSAGE_ID) {
MessageId initialMessageId = position.getMessageId();
if (!initialMessageId.equals(MessageId.earliest)
&& !initialMessageId.equals(MessageId.latest)) {
MessageId lastMessageId =
sneakyAdmin(
() ->
pulsarAdmin
.topics()
.getLastMessageId(partition.getFullTopicName()));
if (initialMessageId.compareTo(lastMessageId) > 0) {
CursorVerification verification = sourceConfiguration.getVerifyInitialOffsets();
if (verification == FAIL_ON_MISMATCH) {
throw new IllegalArgumentException(
"Invalid start position "
+ initialMessageId
+ " for partition "
+ partition);
} else {
if (verification == WARN_ON_MISMATCH) {
LOG.warn(
"Start position {} is wrong, reset to valid position {}",
initialMessageId,
lastMessageId);
}
position = new CursorPosition(MessageId.latest);
}
}
}
}
position.seekPosition(consumer);
} | protected void initialStartPosition(PulsarPartitionSplit split, Consumer<byte[]> consumer) {
StartCursor startCursor = split.getStartCursor();
CursorPosition position = startCursor.position(split);
try {
position.seekPosition(consumer);
} catch (PulsarClientException e) {
if (sourceConfiguration.getVerifyInitialOffsets() == FAIL_ON_MISMATCH) {
throw new IllegalArgumentException(e);
} else {
LOG.warn(e.getMessage());
}
}
} | class PulsarPartitionSplitReaderBase<OUT>
implements SplitReader<PulsarMessage<OUT>, PulsarPartitionSplit> {
private static final Logger LOG = LoggerFactory.getLogger(PulsarPartitionSplitReaderBase.class);
protected final PulsarClient pulsarClient;
protected final PulsarAdmin pulsarAdmin;
protected final Configuration configuration;
protected final SourceConfiguration sourceConfiguration;
protected final ConfigurationDataCustomizer<ConsumerConfigurationData<byte[]>>
consumerConfigurationCustomizer;
protected final PulsarDeserializationSchema<OUT> deserializationSchema;
protected final AtomicBoolean wakeup;
protected Consumer<byte[]> pulsarConsumer;
protected PulsarPartitionSplit registeredSplit;
protected PulsarPartitionSplitReaderBase(
PulsarClient pulsarClient,
PulsarAdmin pulsarAdmin,
Configuration configuration,
SourceConfiguration sourceConfiguration,
ConfigurationDataCustomizer<ConsumerConfigurationData<byte[]>>
consumerConfigurationCustomizer,
PulsarDeserializationSchema<OUT> deserializationSchema) {
this.pulsarClient = pulsarClient;
this.pulsarAdmin = pulsarAdmin;
this.configuration = configuration;
this.sourceConfiguration = sourceConfiguration;
this.consumerConfigurationCustomizer = consumerConfigurationCustomizer;
this.deserializationSchema = deserializationSchema;
this.wakeup = new AtomicBoolean(false);
}
@Override
public RecordsWithSplitIds<PulsarMessage<OUT>> fetch() {
RecordsBySplits.Builder<PulsarMessage<OUT>> builder = new RecordsBySplits.Builder<>();
if (pulsarConsumer == null || registeredSplit == null) {
return builder.build();
}
wakeup.compareAndSet(true, false);
StopCursor stopCursor = registeredSplit.getStopCursor();
String splitId = registeredSplit.splitId();
CollectorSupplier<OUT> supplier = new CollectorSupplier<>(splitId, builder);
Deadline deadline = Deadline.fromNow(sourceConfiguration.getMaxFetchTime());
for (int messageNum = 0;
messageNum < sourceConfiguration.getMaxFetchRecords()
&& deadline.hasTimeLeft()
&& isNotWakeup();
messageNum++) {
try {
Duration timeout = deadline.timeLeftIfAny();
Message<byte[]> message = pollMessage(timeout);
if (stopCursor.shouldStop(message)) {
builder.addFinishedSplit(splitId);
break;
}
deserializationSchema.deserialize(message, supplier.collector(message));
finishedPollMessage(message);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
} catch (TimeoutException e) {
break;
} catch (Exception e) {
LOG.error("", e);
break;
}
}
return builder.build();
}
@Override
public void handleSplitsChanges(SplitsChange<PulsarPartitionSplit> splitsChanges) {
LOG.debug("Handle split changes {}", splitsChanges);
if (!(splitsChanges instanceof SplitsAddition)) {
throw new UnsupportedOperationException(
String.format(
"The SplitChange type of %s is not supported.",
splitsChanges.getClass()));
}
if (registeredSplit != null) {
throw new IllegalStateException("This split reader have assigned split.");
}
List<PulsarPartitionSplit> newSplits = splitsChanges.splits();
Preconditions.checkArgument(
newSplits.size() == 1, "This pulsar split reader only support one split.");
PulsarPartitionSplit newSplit = newSplits.get(0);
Consumer<byte[]> consumer = createPulsarConsumer(newSplit);
newSplit.open(configuration, sourceConfiguration);
startConsumer(newSplit, consumer);
LOG.info("Register split {} consumer for current reader.", newSplit);
this.registeredSplit = newSplit;
this.pulsarConsumer = consumer;
}
@Override
public void wakeUp() {
wakeup.compareAndSet(false, true);
}
@Override
public void close() {
if (pulsarConsumer != null) {
sneakyClient(() -> pulsarConsumer.close());
}
}
protected abstract Message<byte[]> pollMessage(Duration timeout)
throws ExecutionException, InterruptedException, TimeoutException;
protected abstract void finishedPollMessage(Message<byte[]> message);
protected abstract void startConsumer(PulsarPartitionSplit split, Consumer<byte[]> consumer);
protected boolean isNotWakeup() {
return !wakeup.get();
}
/** Create a specified {@link Consumer} by the given split information. */
protected Consumer<byte[]> createPulsarConsumer(PulsarPartitionSplit split) {
return createPulsarConsumer(split.getPartition());
}
/** Create a specified {@link Consumer} by the given topic partition. */
protected Consumer<byte[]> createPulsarConsumer(TopicPartition partition) {
ConfigurationDataCustomizer<ConsumerConfigurationData<byte[]>> customizer =
consumerConfigurationCustomizer.compose(
config -> {
config.setTopicsPattern(null);
config.setTopicNames(singleton(partition.getFullTopicName()));
if (sourceConfiguration.getSubscriptionType()
== SubscriptionType.Key_Shared) {
KeySharedPolicy policy =
KeySharedPolicy.stickyHashRange()
.ranges(partition.getPulsarRange());
config.setKeySharedPolicy(policy);
}
});
return sneakyClient(
() -> createConsumer(pulsarClient, Schema.BYTES, configuration, customizer));
}
} | class PulsarPartitionSplitReaderBase<OUT>
implements SplitReader<PulsarMessage<OUT>, PulsarPartitionSplit> {
private static final Logger LOG = LoggerFactory.getLogger(PulsarPartitionSplitReaderBase.class);
protected final PulsarClient pulsarClient;
protected final PulsarAdmin pulsarAdmin;
protected final Configuration configuration;
protected final SourceConfiguration sourceConfiguration;
protected final ConfigurationDataCustomizer<ConsumerConfigurationData<byte[]>>
consumerConfigurationCustomizer;
protected final PulsarDeserializationSchema<OUT> deserializationSchema;
protected final AtomicBoolean wakeup;
protected Consumer<byte[]> pulsarConsumer;
protected PulsarPartitionSplit registeredSplit;
protected PulsarPartitionSplitReaderBase(
PulsarClient pulsarClient,
PulsarAdmin pulsarAdmin,
Configuration configuration,
SourceConfiguration sourceConfiguration,
ConfigurationDataCustomizer<ConsumerConfigurationData<byte[]>>
consumerConfigurationCustomizer,
PulsarDeserializationSchema<OUT> deserializationSchema) {
this.pulsarClient = pulsarClient;
this.pulsarAdmin = pulsarAdmin;
this.configuration = configuration;
this.sourceConfiguration = sourceConfiguration;
this.consumerConfigurationCustomizer = consumerConfigurationCustomizer;
this.deserializationSchema = deserializationSchema;
this.wakeup = new AtomicBoolean(false);
}
@Override
public RecordsWithSplitIds<PulsarMessage<OUT>> fetch() throws IOException {
RecordsBySplits.Builder<PulsarMessage<OUT>> builder = new RecordsBySplits.Builder<>();
if (pulsarConsumer == null || registeredSplit == null) {
return builder.build();
}
wakeup.compareAndSet(true, false);
StopCursor stopCursor = registeredSplit.getStopCursor();
String splitId = registeredSplit.splitId();
PulsarMessageCollector<OUT> collector = new PulsarMessageCollector<>(splitId, builder);
Deadline deadline = Deadline.fromNow(sourceConfiguration.getMaxFetchTime());
for (int messageNum = 0;
messageNum < sourceConfiguration.getMaxFetchRecords()
&& deadline.hasTimeLeft()
&& isNotWakeup();
messageNum++) {
try {
Duration timeout = deadline.timeLeftIfAny();
Message<byte[]> message = pollMessage(timeout);
collector.setMessage(message);
deserializationSchema.deserialize(message, collector);
finishedPollMessage(message);
if (stopCursor.shouldStop(message)) {
builder.addFinishedSplit(splitId);
break;
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
} catch (TimeoutException e) {
break;
} catch (ExecutionException e) {
LOG.error("Error in polling message from pulsar consumer.", e);
break;
} catch (Exception e) {
throw new IOException(e);
}
}
return builder.build();
}
@Override
public void handleSplitsChanges(SplitsChange<PulsarPartitionSplit> splitsChanges) {
LOG.debug("Handle split changes {}", splitsChanges);
if (!(splitsChanges instanceof SplitsAddition)) {
throw new UnsupportedOperationException(
String.format(
"The SplitChange type of %s is not supported.",
splitsChanges.getClass()));
}
if (registeredSplit != null) {
throw new IllegalStateException("This split reader have assigned split.");
}
List<PulsarPartitionSplit> newSplits = splitsChanges.splits();
Preconditions.checkArgument(
newSplits.size() == 1, "This pulsar split reader only support one split.");
PulsarPartitionSplit newSplit = newSplits.get(0);
Consumer<byte[]> consumer = createPulsarConsumer(newSplit);
newSplit.open(pulsarAdmin);
startConsumer(newSplit, consumer);
LOG.info("Register split {} consumer for current reader.", newSplit);
this.registeredSplit = newSplit;
this.pulsarConsumer = consumer;
}
@Override
public void wakeUp() {
wakeup.compareAndSet(false, true);
}
@Override
public void close() {
if (pulsarConsumer != null) {
sneakyClient(() -> pulsarConsumer.close());
}
}
protected abstract Message<byte[]> pollMessage(Duration timeout)
throws ExecutionException, InterruptedException, TimeoutException;
protected abstract void finishedPollMessage(Message<byte[]> message);
protected abstract void startConsumer(PulsarPartitionSplit split, Consumer<byte[]> consumer);
protected boolean isNotWakeup() {
return !wakeup.get();
}
/** Create a specified {@link Consumer} by the given split information. */
protected Consumer<byte[]> createPulsarConsumer(PulsarPartitionSplit split) {
return createPulsarConsumer(split.getPartition());
}
/** Create a specified {@link Consumer} by the given topic partition. */
protected Consumer<byte[]> createPulsarConsumer(TopicPartition partition) {
ConfigurationDataCustomizer<ConsumerConfigurationData<byte[]>> customizer =
consumerConfigurationCustomizer.compose(
config -> {
config.setTopicsPattern(null);
config.setTopicNames(singleton(partition.getFullTopicName()));
if (sourceConfiguration.getSubscriptionType()
== SubscriptionType.Key_Shared) {
KeySharedPolicy policy =
KeySharedPolicy.stickyHashRange()
.ranges(partition.getPulsarRange());
config.setKeySharedPolicy(policy);
}
});
return sneakyClient(
() -> createConsumer(pulsarClient, Schema.BYTES, configuration, customizer));
}
} |
|
`SeContainerInitializer.enableInterceptors()` only enables the interceptors for the synthetic bean archive. `MetricsInterceptor` and friends are enabled globally (for the application) using `@Priority` - so there's no need to enable them locally. In fact, this enablement is just ignored. | public void addInterceptor(SeContainerInitializer initialize, Class<?> interceptorClass) {
initialize.enableInterceptors(interceptorClass);
} | initialize.enableInterceptors(interceptorClass); | public void addInterceptor(SeContainerInitializer initialize, Class<?> interceptorClass) {
initialize.enableInterceptors(interceptorClass);
} | class WeldDeploymentTemplate {
public SeContainerInitializer createWeld() throws Exception {
new URLConnection(new URL("http:
@Override
public void connect() throws IOException {
}
}.setDefaultUseCaches(false);
Class<?> clazz = Class.forName("sun.net.www.protocol.jar.JarFileFactory");
Field field = clazz.getDeclaredField("fileCache");
field.setAccessible(true);
Map<String, JarFile> fileCache = (Map<String, JarFile>) field.get(null);
for (Map.Entry<String, JarFile> e : new HashSet<>(fileCache.entrySet())) {
e.getValue().close();
}
fileCache.clear();
field = clazz.getDeclaredField("urlCache");
field.setAccessible(true);
Map<JarFile, URL> urlCache = (Map<JarFile, URL>) field.get(null);
for (Map.Entry<JarFile, URL> e : new HashSet<>(urlCache.entrySet())) {
e.getKey().close();
}
urlCache.clear();
Weld weld = new Weld();
weld.disableDiscovery();
weld.skipShutdownHook();
weld.property(ConfigurationKey.UNUSED_BEANS_EXCLUDE_TYPE.get(), UnusedBeans.ALL);
weld.property(ConfigurationKey.UNUSED_BEANS_EXCLUDE_ANNOTATION.get(), "javax\\.ws\\.rs.*|javax\\.servlet\\.annotation.*");
return weld;
}
public void addClass(SeContainerInitializer initializer, Class<?> clazz) {
initializer.addBeanClasses(clazz);
}
@ContextObject("weld.container")
public SeContainer doBoot( SeContainerInitializer initializer) throws Exception {
SeContainer container = initializer.initialize();
Set<Bean<?>> instance = container.getBeanManager().getBeans(Object.class);
for (Bean<?> bean : instance) {
if (container.getBeanManager().isNormalScope(bean.getScope())) {
container.getBeanManager().getReference(bean, Object.class, container.getBeanManager().createCreationalContext(bean));
}
}
return container;
}
public void registerShutdownHook(@ContextObject("weld.container") SeContainer container) {
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
}
}, "Weld Shutdown Hook Thread"));
}
public void setupInjection(SeContainer container) {
RuntimeInjector.setFactory(new InjectionFactory() {
@Override
public <T> InjectionInstance<T> create(Class<T> type) {
Instance<T> instance = container.select(type);
if (instance.isResolvable()) {
return new InjectionInstance<T>() {
@Override
public T newInstance() {
return instance.get();
}
};
} else {
return new InjectionInstance<T>() {
@Override
public T newInstance() {
try {
return type.newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
};
}
}
});
}
} | class WeldDeploymentTemplate {
public SeContainerInitializer createWeld() throws Exception {
new URLConnection(new URL("http:
@Override
public void connect() throws IOException {
}
}.setDefaultUseCaches(false);
Class<?> clazz = Class.forName("sun.net.www.protocol.jar.JarFileFactory");
Field field = clazz.getDeclaredField("fileCache");
field.setAccessible(true);
Map<String, JarFile> fileCache = (Map<String, JarFile>) field.get(null);
for (Map.Entry<String, JarFile> e : new HashSet<>(fileCache.entrySet())) {
e.getValue().close();
}
fileCache.clear();
field = clazz.getDeclaredField("urlCache");
field.setAccessible(true);
Map<JarFile, URL> urlCache = (Map<JarFile, URL>) field.get(null);
for (Map.Entry<JarFile, URL> e : new HashSet<>(urlCache.entrySet())) {
e.getKey().close();
}
urlCache.clear();
Weld weld = new Weld();
weld.disableDiscovery();
weld.skipShutdownHook();
weld.property(ConfigurationKey.UNUSED_BEANS_EXCLUDE_TYPE.get(), UnusedBeans.ALL);
weld.property(ConfigurationKey.UNUSED_BEANS_EXCLUDE_ANNOTATION.get(), "javax\\.ws\\.rs.*|javax\\.servlet\\.annotation.*");
return weld;
}
public void addClass(SeContainerInitializer initializer, Class<?> clazz) {
initializer.addBeanClasses(clazz);
}
/**
 * Boots the container and eagerly obtains a reference for every normal-scoped
 * bean, so client proxies are created up front rather than on first use.
 */
@ContextObject("weld.container")
public SeContainer doBoot( SeContainerInitializer initializer) throws Exception {
    SeContainer seContainer = initializer.initialize();
    Set<Bean<?>> beans = seContainer.getBeanManager().getBeans(Object.class);
    for (Bean<?> bean : beans) {
        if (!seContainer.getBeanManager().isNormalScope(bean.getScope())) {
            continue;
        }
        seContainer.getBeanManager().getReference(bean, Object.class,
                seContainer.getBeanManager().createCreationalContext(bean));
    }
    return seContainer;
}
/**
 * Installs a JVM shutdown hook thread for the Weld container.
 * The hook body is intentionally empty; container shutdown is handled elsewhere.
 */
public void registerShutdownHook(@ContextObject("weld.container") SeContainer container) {
    Thread shutdownHook = new Thread(() -> {
        // intentionally a no-op
    }, "Weld Shutdown Hook Thread");
    Runtime.getRuntime().addShutdownHook(shutdownHook);
}
/**
 * Wires the runtime injection facility to the Weld container.
 * <p>
 * Resolvable bean types are produced by the CDI container; any other type
 * falls back to reflective no-arg-constructor instantiation.
 *
 * @param container the booted Weld SE container used to resolve beans
 */
public void setupInjection(SeContainer container) {
    RuntimeInjector.setFactory(new InjectionFactory() {
        @Override
        public <T> InjectionInstance<T> create(Class<T> type) {
            Instance<T> instance = container.select(type);
            if (instance.isResolvable()) {
                // Managed bean: let the container create (and manage) instances.
                return new InjectionInstance<T>() {
                    @Override
                    public T newInstance() {
                        return instance.get();
                    }
                };
            } else {
                // Not a bean: fall back to the public no-arg constructor.
                return new InjectionInstance<T>() {
                    @Override
                    public T newInstance() {
                        try {
                            // Replaces the deprecated Class.newInstance(), which
                            // propagates undeclared checked exceptions.
                            return type.getDeclaredConstructor().newInstance();
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    }
                };
            }
        }
    });
}
} |
Since the CodeActionNodeValidator is an expensive one, let's do the other checks first and do the CodeActionNodeValidator last. | public boolean validate(CodeActionContext context, RangeBasedPositionDetails positionDetails) {
Node node = positionDetails.matchedCodeActionNode();
return CodeActionNodeValidator.validate(context.nodeAtRange()) && context.currentSyntaxTree().isPresent()
&& context.currentSemanticModel().isPresent() && node.parent().kind() != SyntaxKind.CONST_DECLARATION
&& node.parent().kind() != SyntaxKind.INVALID_EXPRESSION_STATEMENT;
} | return CodeActionNodeValidator.validate(context.nodeAtRange()) && context.currentSyntaxTree().isPresent() | public boolean validate(CodeActionContext context, RangeBasedPositionDetails positionDetails) {
Node node = positionDetails.matchedCodeActionNode();
return context.currentSyntaxTree().isPresent() && context.currentSemanticModel().isPresent()
&& node.parent().kind() != SyntaxKind.CONST_DECLARATION
&& node.parent().kind() != SyntaxKind.INVALID_EXPRESSION_STATEMENT
&& CodeActionNodeValidator.validate(context.nodeAtRange());
} | class ExtractToConstantCodeAction implements RangeBasedCodeActionProvider {
// Display name of this code action, used for registration/lookup.
public static final String NAME = "Extract To Constant";
// Prefix for generated constant names; NameUtil derives a unique name from it.
private static final String CONSTANT_NAME_PREFIX = "CONST";
/** Syntax kinds this provider reacts to: literals and binary expressions. */
public List<SyntaxKind> getSyntaxKinds() {
    return List.of(
            SyntaxKind.BOOLEAN_LITERAL,
            SyntaxKind.NUMERIC_LITERAL,
            SyntaxKind.STRING_LITERAL,
            SyntaxKind.BINARY_EXPRESSION);
}
/**
 * {@inheritDoc}
 * <p>
 * Fixed: the original had a second {@code @Override} before the Javadoc;
 * a duplicate (non-repeatable) annotation is a compile error.
 */
@Override
public List<CodeAction> getCodeActions(CodeActionContext context,
                                       RangeBasedPositionDetails posDetails) {
    Node node = posDetails.matchedCodeActionNode();
    // Reject selections containing anything other than (binary expressions of) literals.
    ExtractToConstantCodeAction.BasicLiteralNodeValidator nodeValidator
            = new ExtractToConstantCodeAction.BasicLiteralNodeValidator();
    node.accept(nodeValidator);
    if (nodeValidator.getInvalidNode()) {
        return Collections.emptyList();
    }
    String constName = getLocalVarName(context);
    String value = node.toSourceCode().strip();
    LineRange replaceRange = node.lineRange();
    Optional<TypeSymbol> typeSymbol = context.currentSemanticModel().get().typeOf(node);
    if (typeSymbol.isEmpty()) {
        return Collections.emptyList();
    }
    // Insert the constant declaration at the computed module position and
    // replace the selected expression with the generated constant name.
    Position constDeclPosition = getPosition(context);
    String constDeclStr = String.format("const %s %s = %s;%n", typeSymbol.get().signature(), constName, value);
    TextEdit constDeclEdit = new TextEdit(new Range(constDeclPosition, constDeclPosition), constDeclStr);
    TextEdit replaceEdit = new TextEdit(new Range(PositionUtil.toPosition(replaceRange.startLine()),
            PositionUtil.toPosition(replaceRange.endLine())), constName);
    return Collections.singletonList(CodeActionUtil.createCodeAction(CommandConstants.EXTRACT_TO_CONSTANT,
            List.of(constDeclEdit, replaceEdit), context.fileUri(), CodeActionKind.RefactorExtract));
}
@Override
public String getName() {
    // Stable provider name used by the code-action framework.
    return NAME;
}
/**
 * Node visitor to ensure the highlighted range does not include nodes other than BasicLiteralNode.
 *
 */
static class BasicLiteralNodeValidator extends NodeVisitor {
    // Set to true as soon as a non-literal node is visited.
    private boolean invalidNode = false;
    @Override
    public void visit(BinaryExpressionNode node) {
        // Recurse into both operands; only literal leaves are acceptable.
        node.lhsExpr().accept(this);
        node.rhsExpr().accept(this);
    }
    @Override
    public void visit(BasicLiteralNode node) {
        // Literals are valid extraction targets — nothing to do.
    }
    @Override
    protected void visitSyntaxNode(Node node) {
        // Any other node kind invalidates the selection.
        invalidNode = true;
    }
    public Boolean getInvalidNode() {
        return invalidNode;
    }
}
/**
 * Produces a constant name that does not clash with any symbol visible at the
 * end of the selected range.
 */
private String getLocalVarName(CodeActionContext context) {
    Position cursor = context.range().getEnd();
    Set<String> visibleNames = context.visibleSymbols(new Position(cursor.getLine(), cursor.getCharacter()))
            .stream()
            .map(Symbol::getName)
            .filter(Optional::isPresent)
            .map(Optional::get)
            .collect(Collectors.toSet());
    return NameUtil.generateTypeName(CONSTANT_NAME_PREFIX, visibleNames);
}
/**
 * Returns the insertion point for the new constant: the start of the module
 * when there are no imports, otherwise two lines below the last import.
 */
private Position getPosition(CodeActionContext context) {
    ModulePartNode modulePart = context.currentSyntaxTree().get().rootNode();
    NodeList<ImportDeclarationNode> imports = modulePart.imports();
    if (imports.isEmpty()) {
        return PositionUtil.toPosition(modulePart.lineRange().startLine());
    }
    ImportDeclarationNode finalImport = imports.get(imports.size() - 1);
    return new Position(finalImport.lineRange().endLine().line() + 2, 0);
}
} | class ExtractToConstantCodeAction implements RangeBasedCodeActionProvider {
// Display name of this code action, used for registration/lookup.
public static final String NAME = "Extract To Constant";
// Prefix for generated constant names; NameUtil derives a unique name from it.
private static final String CONSTANT_NAME_PREFIX = "CONST";
/** Syntax kinds this provider reacts to: literals, binary and unary expressions. */
public List<SyntaxKind> getSyntaxKinds() {
    return List.of(
            SyntaxKind.BOOLEAN_LITERAL,
            SyntaxKind.NUMERIC_LITERAL,
            SyntaxKind.STRING_LITERAL,
            SyntaxKind.BINARY_EXPRESSION,
            SyntaxKind.UNARY_EXPRESSION);
}
/**
 * {@inheritDoc}
 * <p>
 * Fixed: the original had a second {@code @Override} before the Javadoc;
 * a duplicate (non-repeatable) annotation is a compile error.
 */
@Override
public List<CodeAction> getCodeActions(CodeActionContext context,
                                       RangeBasedPositionDetails posDetails) {
    Node node = posDetails.matchedCodeActionNode();
    // Reject selections containing anything other than (unary/binary expressions of) literals.
    BasicLiteralNodeValidator nodeValidator = new BasicLiteralNodeValidator();
    node.accept(nodeValidator);
    if (nodeValidator.getInvalidNode()) {
        return Collections.emptyList();
    }
    String constName = getLocalVarName(context);
    String value = node.toSourceCode().strip();
    LineRange replaceRange = node.lineRange();
    Optional<TypeSymbol> typeSymbol = context.currentSemanticModel().get().typeOf(node);
    if (typeSymbol.isEmpty()) {
        return Collections.emptyList();
    }
    // Insert the constant declaration at the computed module position and
    // replace the selected expression with the generated constant name.
    Position constDeclPosition = getPosition(context);
    String constDeclStr = String.format("const %s %s = %s;%n", typeSymbol.get().signature(), constName, value);
    TextEdit constDeclEdit = new TextEdit(new Range(constDeclPosition, constDeclPosition), constDeclStr);
    TextEdit replaceEdit = new TextEdit(new Range(PositionUtil.toPosition(replaceRange.startLine()),
            PositionUtil.toPosition(replaceRange.endLine())), constName);
    return Collections.singletonList(CodeActionUtil.createCodeAction(CommandConstants.EXTRACT_TO_CONSTANT,
            List.of(constDeclEdit, replaceEdit), context.fileUri(), CodeActionKind.RefactorExtract));
}
@Override
public String getName() {
    // Stable provider name used by the code-action framework.
    return NAME;
}
/**
 * Produces a constant name that does not clash with any symbol visible at the
 * end of the selected range.
 */
private String getLocalVarName(CodeActionContext context) {
    Position cursor = context.range().getEnd();
    Set<String> visibleNames = context.visibleSymbols(new Position(cursor.getLine(), cursor.getCharacter()))
            .stream()
            .map(Symbol::getName)
            .filter(Optional::isPresent)
            .map(Optional::get)
            .collect(Collectors.toSet());
    return NameUtil.generateTypeName(CONSTANT_NAME_PREFIX, visibleNames);
}
/**
 * Returns the insertion point for the new constant: the start of the module
 * when there are no imports, otherwise two lines below the last import.
 */
private Position getPosition(CodeActionContext context) {
    ModulePartNode modulePart = context.currentSyntaxTree().get().rootNode();
    NodeList<ImportDeclarationNode> imports = modulePart.imports();
    if (imports.isEmpty()) {
        return PositionUtil.toPosition(modulePart.lineRange().startLine());
    }
    ImportDeclarationNode finalImport = imports.get(imports.size() - 1);
    return new Position(finalImport.lineRange().endLine().line() + 2, 0);
}
/**
 * Node visitor to ensure the highlighted range does not include nodes other than BasicLiteralNode.
 *
 */
static class BasicLiteralNodeValidator extends NodeVisitor {
    // Set to true as soon as a non-literal node is visited.
    private boolean invalidNode = false;
    @Override
    public void visit(BinaryExpressionNode node) {
        // Recurse into both operands; only literal leaves are acceptable.
        node.lhsExpr().accept(this);
        node.rhsExpr().accept(this);
    }
    @Override
    public void visit(BasicLiteralNode node) {
        // Literals are valid extraction targets — nothing to do.
    }
    @Override
    public void visit(UnaryExpressionNode node) {
        // Unary expressions (e.g. negated literals) are accepted as-is.
    }
    @Override
    protected void visitSyntaxNode(Node node) {
        // Any other node kind invalidates the selection.
        invalidNode = true;
    }
    public Boolean getInvalidNode() {
        return invalidNode;
    }
}
} |
So I'm new to this part of the code, so please correct me if I'm wrong, but this will now invoke tear down each time it constructs a new new output iterator for a given elem off the input iterator and it seems like we aren't done with the DoFn at this point necessarily. | protected OutputT computeNext() {
boolean isBundleStarted = false;
boolean isBundleFinished = false;
// NOTE(review): these flags are method locals, so a later call to computeNext()
// that exhausts the buffered output will invoke startBundle() again — the
// bundle lifecycle restarts per call rather than once per iterator. Confirm
// this is intended.
while (true) {
    // Drain outputs buffered by the previous element / timer / finishBundle step.
    if (outputIterator.hasNext()) {
        return outputIterator.next();
    }
    if (!isBundleStarted) {
        isBundleStarted = true;
        doFnRunner.startBundle();
    }
    clearOutput();
    if (inputIterator.hasNext()) {
        doFnRunner.processElement(inputIterator.next());
        outputIterator = getOutputIterator();
    } else if (timerDataIterator.hasNext()) {
        fireTimer(timerDataIterator.next());
        outputIterator = getOutputIterator();
    } else {
        if (!isBundleFinished) {
            isBundleFinished = true;
            doFnRunner.finishBundle();
            outputIterator = getOutputIterator();
            continue;
        }
        // NOTE(review): teardown only runs on normal end-of-data here, not when
        // processElement/finishBundle throws.
        DoFnInvokers.invokerFor(doFn).invokeTeardown();
        return endOfData();
    }
}
} | DoFnInvokers.invokerFor(doFn).invokeTeardown(); | protected OutputT computeNext() {
// Start the bundle at most once per iterator (flags are instance fields).
if (!isBundleStarted) {
    isBundleStarted = true;
    doFnRunner.startBundle();
}
try {
    while (true) {
        // Drain outputs buffered by the previous element / timer / finishBundle step.
        if (outputIterator.hasNext()) {
            return outputIterator.next();
        }
        clearOutput();
        if (inputIterator.hasNext()) {
            doFnRunner.processElement(inputIterator.next());
            outputIterator = getOutputIterator();
        } else if (timerDataIterator.hasNext()) {
            fireTimer(timerDataIterator.next());
            outputIterator = getOutputIterator();
        } else {
            if (!isBundleFinished) {
                isBundleFinished = true;
                doFnRunner.finishBundle();
                outputIterator = getOutputIterator();
                continue;
            }
            // Inputs, timers and final bundle output exhausted: tear down the DoFn.
            DoFnInvokers.invokerFor(doFn).invokeTeardown();
            return endOfData();
        }
    }
} catch (final RuntimeException re) {
    // Ensure teardown also runs when processing fails, then propagate.
    DoFnInvokers.invokerFor(doFn).invokeTeardown();
    throw re;
}
} | class ProcCtxtIterator extends AbstractIterator<OutputT> {
// Source of windowed input elements fed through the DoFn.
private final Iterator<WindowedValue<FnInputT>> inputIterator;
// Runner used to drive the DoFn over elements and timers.
private final DoFnRunner<FnInputT, FnOutputT> doFnRunner;
// Outputs produced so far; replaced after each processing step.
private Iterator<OutputT> outputIterator;
ProcCtxtIterator(
    Iterator<WindowedValue<FnInputT>> iterator,
    DoFnRunner<FnInputT, FnOutputT> doFnRunner) {
    this.inputIterator = iterator;
    this.doFnRunner = doFnRunner;
    // Capture any outputs already buffered before iteration begins.
    this.outputIterator = getOutputIterator();
}
/**
 * Dispatches a fired timer to the DoFn within the timer's window.
 * Fixed: removed a stray {@code @Override} — the method is private and
 * overrides nothing, so the annotation is a compile error.
 *
 * @param timer the timer data whose namespace must be a window namespace
 */
private void fireTimer(
        TimerInternals.TimerData timer) {
    StateNamespace namespace = timer.getNamespace();
    checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
    BoundedWindow window = ((StateNamespaces.WindowNamespace) namespace).getWindow();
    doFnRunner.onTimer(timer.getTimerId(), window, timer.getTimestamp(), timer.getDomain());
}
} | class ProcCtxtIterator extends AbstractIterator<OutputT> {
// Source of windowed input elements fed through the DoFn.
private final Iterator<WindowedValue<FnInputT>> inputIterator;
// Runner used to drive the DoFn over elements and timers.
private final DoFnRunner<FnInputT, FnOutputT> doFnRunner;
// Outputs produced so far; replaced after each processing step.
private Iterator<OutputT> outputIterator;
// Bundle lifecycle flags shared across computeNext() calls, so startBundle()
// and finishBundle() each run at most once per iterator instance.
private boolean isBundleStarted;
private boolean isBundleFinished;
ProcCtxtIterator(
    Iterator<WindowedValue<FnInputT>> iterator,
    DoFnRunner<FnInputT, FnOutputT> doFnRunner) {
    this.inputIterator = iterator;
    this.doFnRunner = doFnRunner;
    // Capture any outputs already buffered before iteration begins.
    this.outputIterator = getOutputIterator();
}
/**
 * Dispatches a fired timer to the DoFn within the timer's window.
 * Fixed: removed a stray {@code @Override} — the method is private and
 * overrides nothing, so the annotation is a compile error.
 *
 * @param timer the timer data whose namespace must be a window namespace
 */
private void fireTimer(
        TimerInternals.TimerData timer) {
    StateNamespace namespace = timer.getNamespace();
    checkArgument(namespace instanceof StateNamespaces.WindowNamespace);
    BoundedWindow window = ((StateNamespaces.WindowNamespace) namespace).getWindow();
    doFnRunner.onTimer(timer.getTimerId(), window, timer.getTimestamp(), timer.getDomain());
}
} |
Same here - call the convenience method. | public Completions getCompletions(String deploymentId, String prompt) {
RequestOptions requestOptions = new RequestOptions();
CompletionsOptions completionsOptions = CompletionsUtils.DefaultCompletionsOptions(prompt);
return getCompletionsWithResponse(deploymentId, BinaryData.fromObject(completionsOptions), requestOptions)
.getValue()
.toObject(Completions.class);
} | return getCompletionsWithResponse(deploymentId, BinaryData.fromObject(completionsOptions), requestOptions) | public Completions getCompletions(String deploymentId, String prompt) {
return getCompletions(deploymentId, CompletionsUtils.defaultCompletionsOptions(prompt));
} | class OpenAIClient {
// Underlying async client; every synchronous method delegates to it and blocks.
@Generated private final OpenAIAsyncClient client;
/**
* Initializes an instance of OpenAIClient class.
*
* @param client the async client.
*/
@Generated
OpenAIClient(OpenAIAsyncClient client) {
    // Store the async delegate used by all blocking operations.
    this.client = client;
}
/**
* Return the embeddings for a given prompt.
*
* <p><strong>Request Body Schema</strong>
*
* <pre>{@code
* {
* user: String (Optional)
* model: String (Optional)
* input: InputModelBase (Required)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong>
*
* <pre>{@code
* {
* data (Required): [
* (Required){
* embedding (Required): [
* double (Required)
* ]
* index: int (Required)
* }
* ]
* usage (Required): {
* prompt_tokens: int (Required)
* total_tokens: int (Required)
* }
* }
* }</pre>
*
* @param deploymentId deployment id of the deployed model.
* @param embeddingsOptions The configuration information for an embeddings request. Embeddings measure the
* relatedness of text strings and are commonly used for search, clustering, recommendations, and other similar
* scenarios.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return representation of the response data from an embeddings request. Embeddings measure the relatedness of
* text strings and are commonly used for search, clustering, recommendations, and other similar scenarios along
* with {@link Response}.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BinaryData> getEmbeddingsWithResponse(
        String deploymentId, BinaryData embeddingsOptions, RequestOptions requestOptions) {
    // Delegate to the async client and block for the synchronous result.
    return this.client
            .getEmbeddingsWithResponse(deploymentId, embeddingsOptions, requestOptions)
            .block();
}
/**
* Gets completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*
* <p><strong>Request Body Schema</strong>
*
* <pre>{@code
* {
* prompt (Required): [
* String (Required)
* ]
* max_tokens: Integer (Optional)
* temperature: Double (Optional)
* top_p: Double (Optional)
* logit_bias (Optional): {
* String: int (Optional)
* }
* user: String (Optional)
* n: Integer (Optional)
* logprobs: Integer (Optional)
* echo: Boolean (Optional)
* stop (Optional): [
* String (Optional)
* ]
* presence_penalty: Double (Optional)
* frequency_penalty: Double (Optional)
* best_of: Integer (Optional)
* stream: Boolean (Optional)
* model: String (Optional)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong>
*
* <pre>{@code
* {
* id: String (Required)
* created: int (Required)
* choices (Required): [
* (Required){
* text: String (Required)
* index: int (Required)
* logprobs (Required): {
* tokens (Required): [
* String (Required)
* ]
* token_logprobs (Required): [
* double (Required)
* ]
* top_logprobs (Required): [
* (Required){
* String: double (Required)
* }
* ]
* text_offset (Required): [
* int (Required)
* ]
* }
* finish_reason: String(stopped/tokenLimitReached/contentFiltered) (Required)
* }
* ]
* usage (Required): {
* completion_tokens: int (Required)
* prompt_tokens: int (Required)
* total_tokens: int (Required)
* }
* }
* }</pre>
*
* @param deploymentId deployment id of the deployed model.
* @param completionsOptions The configuration information for a completions request. Completions support a wide
* variety of tasks and generate text that continues from or "completes" provided prompt data.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data along with {@link Response}.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BinaryData> getCompletionsWithResponse(
        String deploymentId, BinaryData completionsOptions, RequestOptions requestOptions) {
    // Delegate to the async client and block for the synchronous result.
    return this.client
            .getCompletionsWithResponse(deploymentId, completionsOptions, requestOptions)
            .block();
}
/**
* Gets completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*
* <p><strong>Request Body Schema</strong>
*
* <pre>{@code
* {
* prompt (Required): [
* String (Required)
* ]
* max_tokens: Integer (Optional)
* temperature: Double (Optional)
* top_p: Double (Optional)
* logit_bias (Optional): {
* String: int (Optional)
* }
* user: String (Optional)
* n: Integer (Optional)
* logprobs: Integer (Optional)
* echo: Boolean (Optional)
* stop (Optional): [
* String (Optional)
* ]
* presence_penalty: Double (Optional)
* frequency_penalty: Double (Optional)
* best_of: Integer (Optional)
* stream: Boolean (Optional)
* model: String (Optional)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong>
*
* <pre>{@code
* {
* id: String (Required)
* created: int (Required)
* choices (Required): [
* (Required){
* text: String (Required)
* index: int (Required)
* logprobs (Required): {
* tokens (Required): [
* String (Required)
* ]
* token_logprobs (Required): [
* double (Required)
* ]
* top_logprobs (Required): [
* (Required){
* String: double (Required)
* }
* ]
* text_offset (Required): [
* int (Required)
* ]
* }
* finish_reason: String(stopped/tokenLimitReached/contentFiltered) (Required)
* }
* ]
* usage (Required): {
* completion_tokens: int (Required)
* prompt_tokens: int (Required)
* total_tokens: int (Required)
* }
* }
* }</pre>
*
* @param deploymentId deployment id of the deployed model.
* @param prompt The prompts to generate values from.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data along with {@link Response}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BinaryData> getCompletionsWithResponse(
        String deploymentId, String prompt, RequestOptions requestOptions) {
    // Build a default completions request for the single prompt, serialize it,
    // then delegate to the async client and block for the result.
    BinaryData serializedOptions =
            BinaryData.fromObject(CompletionsUtils.DefaultCompletionsOptions(prompt));
    return this.client
            .getCompletionsWithResponse(deploymentId, serializedOptions, requestOptions)
            .block();
}
/**
* Gets chat completions for the provided chat messages. Completions support a wide variety of tasks and generate
* text that continues from or "completes" provided prompt data.
*
* <p><strong>Request Body Schema</strong>
*
* <pre>{@code
* {
* messages (Required): [
* (Required){
* role: String(system/assistant/user) (Required)
* content: String (Optional)
* }
* ]
* max_tokens: Integer (Optional)
* temperature: Double (Optional)
* top_p: Double (Optional)
* logit_bias (Optional): {
* String: int (Optional)
* }
* user: String (Optional)
* n: Integer (Optional)
* stop (Optional): [
* String (Optional)
* ]
* presence_penalty: Double (Optional)
* frequency_penalty: Double (Optional)
* stream: Boolean (Optional)
* model: String (Optional)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong>
*
* <pre>{@code
* {
* id: String (Required)
* created: int (Required)
* choices (Required): [
* (Required){
* message (Optional): {
* role: String(system/assistant/user) (Required)
* content: String (Optional)
* }
* index: int (Required)
* finish_reason: String(stopped/tokenLimitReached/contentFiltered) (Required)
* delta (Optional): (recursive schema, see delta above)
* }
* ]
* usage (Required): {
* completion_tokens: int (Required)
* prompt_tokens: int (Required)
* total_tokens: int (Required)
* }
* }
* }</pre>
*
* @param deploymentId deployment id of the deployed model.
* @param chatCompletionsOptions The configuration information for a chat completions request. Completions support a
* wide variety of tasks and generate text that continues from or "completes" provided prompt data.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return chat completions for the provided chat messages. Completions support a wide variety of tasks and generate
* text that continues from or "completes" provided prompt data along with {@link Response}.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BinaryData> getChatCompletionsWithResponse(
        String deploymentId, BinaryData chatCompletionsOptions, RequestOptions requestOptions) {
    // Delegate to the async client and block for the synchronous result.
    return this.client
            .getChatCompletionsWithResponse(deploymentId, chatCompletionsOptions, requestOptions)
            .block();
}
/**
* Return the embeddings for a given prompt.
*
* @param deploymentId deployment id of the deployed model.
* @param embeddingsOptions The configuration information for an embeddings request. Embeddings measure the
* relatedness of text strings and are commonly used for search, clustering, recommendations, and other similar
* scenarios.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return representation of the response data from an embeddings request. Embeddings measure the relatedness of
* text strings and are commonly used for search, clustering, recommendations, and other similar scenarios.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Embeddings getEmbeddings(String deploymentId, EmbeddingsOptions embeddingsOptions) {
    // Serialize the options, send with default request options, and map the
    // raw payload back onto the typed model.
    BinaryData serializedOptions = BinaryData.fromObject(embeddingsOptions);
    Response<BinaryData> response =
            getEmbeddingsWithResponse(deploymentId, serializedOptions, new RequestOptions());
    return response.getValue().toObject(Embeddings.class);
}
/**
* Gets completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*
* @param deploymentId deployment id of the deployed model.
* @param completionsOptions The configuration information for a completions request. Completions support a wide
* variety of tasks and generate text that continues from or "completes" provided prompt data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Completions getCompletions(String deploymentId, CompletionsOptions completionsOptions) {
    // Serialize the options, send with default request options, and map the
    // raw payload back onto the typed model.
    BinaryData serializedOptions = BinaryData.fromObject(completionsOptions);
    Response<BinaryData> response =
            getCompletionsWithResponse(deploymentId, serializedOptions, new RequestOptions());
    return response.getValue().toObject(Completions.class);
}
/**
* Gets completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*
* @param deploymentId deployment id of the deployed model.
* @param prompt The prompts to generate values from.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Completions getCompletions(String deploymentId, String prompt) {
    // Restored method body: the annotation above had no accompanying
    // declaration (a compile error) while the Javadoc directly above documents
    // exactly this prompt-based overload. Delegates to the
    // CompletionsOptions-based overload with default options.
    return getCompletions(deploymentId, CompletionsUtils.DefaultCompletionsOptions(prompt));
}
/**
 * Gets chat completions for the provided chat messages. Completions support a wide variety of tasks and generate
 * text that continues from or "completes" provided prompt data.
 *
 * @param deploymentId deployment id of the deployed model.
 * @param chatCompletionsOptions The configuration information for a chat completions request. Completions support a
 *     wide variety of tasks and generate text that continues from or "completes" provided prompt data.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws HttpResponseException thrown if the request is rejected by server.
 * @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
 * @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
 * @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return chat completions for the provided chat messages. Completions support a wide variety of tasks and generate
 *     text that continues from or "completes" provided prompt data.
 */
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public ChatCompletions getChatCompletions(String deploymentId, ChatCompletionsOptions chatCompletionsOptions) {
    RequestOptions requestOptions = new RequestOptions();
    return getChatCompletionsWithResponse(
            deploymentId, BinaryData.fromObject(chatCompletionsOptions), requestOptions)
            .getValue()
            .toObject(ChatCompletions.class);
}
} | class OpenAIClient {
// Underlying async client; every synchronous method delegates to it and blocks.
@Generated private final OpenAIAsyncClient client;
/**
* Initializes an instance of OpenAIClient class.
*
* @param client the async client.
*/
@Generated
OpenAIClient(OpenAIAsyncClient client) {
    // Store the async delegate used by all blocking operations.
    this.client = client;
}
/**
* Return the embeddings for a given prompt.
*
* <p><strong>Request Body Schema</strong>
*
* <pre>{@code
* {
* user: String (Optional)
* model: String (Optional)
* input: InputModelBase (Required)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong>
*
* <pre>{@code
* {
* data (Required): [
* (Required){
* embedding (Required): [
* double (Required)
* ]
* index: int (Required)
* }
* ]
* usage (Required): {
* prompt_tokens: int (Required)
* total_tokens: int (Required)
* }
* }
* }</pre>
*
* @param deploymentId deployment id of the deployed model.
* @param embeddingsOptions The configuration information for an embeddings request. Embeddings measure the
* relatedness of text strings and are commonly used for search, clustering, recommendations, and other similar
* scenarios.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return representation of the response data from an embeddings request. Embeddings measure the relatedness of
* text strings and are commonly used for search, clustering, recommendations, and other similar scenarios along
* with {@link Response}.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BinaryData> getEmbeddingsWithResponse(
        String deploymentId, BinaryData embeddingsOptions, RequestOptions requestOptions) {
    // Delegate to the async client and block for the synchronous result.
    return this.client
            .getEmbeddingsWithResponse(deploymentId, embeddingsOptions, requestOptions)
            .block();
}
/**
* Gets completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*
* <p><strong>Request Body Schema</strong>
*
* <pre>{@code
* {
* prompt (Required): [
* String (Required)
* ]
* max_tokens: Integer (Optional)
* temperature: Double (Optional)
* top_p: Double (Optional)
* logit_bias (Optional): {
* String: int (Optional)
* }
* user: String (Optional)
* n: Integer (Optional)
* logprobs: Integer (Optional)
* echo: Boolean (Optional)
* stop (Optional): [
* String (Optional)
* ]
* presence_penalty: Double (Optional)
* frequency_penalty: Double (Optional)
* best_of: Integer (Optional)
* stream: Boolean (Optional)
* model: String (Optional)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong>
*
* <pre>{@code
* {
* id: String (Required)
* created: int (Required)
* choices (Required): [
* (Required){
* text: String (Required)
* index: int (Required)
* logprobs (Required): {
* tokens (Required): [
* String (Required)
* ]
* token_logprobs (Required): [
* double (Required)
* ]
* top_logprobs (Required): [
* (Required){
* String: double (Required)
* }
* ]
* text_offset (Required): [
* int (Required)
* ]
* }
* finish_reason: String(stopped/tokenLimitReached/contentFiltered) (Required)
* }
* ]
* usage (Required): {
* completion_tokens: int (Required)
* prompt_tokens: int (Required)
* total_tokens: int (Required)
* }
* }
* }</pre>
*
* @param deploymentId deployment id of the deployed model.
* @param completionsOptions The configuration information for a completions request. Completions support a wide
* variety of tasks and generate text that continues from or "completes" provided prompt data.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data along with {@link Response}.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BinaryData> getCompletionsWithResponse(
String deploymentId, BinaryData completionsOptions, RequestOptions requestOptions) {
return this.client.getCompletionsWithResponse(deploymentId, completionsOptions, requestOptions).block();
}
/**
* Gets chat completions for the provided chat messages. Completions support a wide variety of tasks and generate
* text that continues from or "completes" provided prompt data.
*
* <p><strong>Request Body Schema</strong>
*
* <pre>{@code
* {
* messages (Required): [
* (Required){
* role: String(system/assistant/user) (Required)
* content: String (Optional)
* }
* ]
* max_tokens: Integer (Optional)
* temperature: Double (Optional)
* top_p: Double (Optional)
* logit_bias (Optional): {
* String: int (Optional)
* }
* user: String (Optional)
* n: Integer (Optional)
* stop (Optional): [
* String (Optional)
* ]
* presence_penalty: Double (Optional)
* frequency_penalty: Double (Optional)
* stream: Boolean (Optional)
* model: String (Optional)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong>
*
* <pre>{@code
* {
* id: String (Required)
* created: int (Required)
* choices (Required): [
* (Required){
* message (Optional): {
* role: String(system/assistant/user) (Required)
* content: String (Optional)
* }
* index: int (Required)
* finish_reason: String(stopped/tokenLimitReached/contentFiltered) (Required)
* delta (Optional): (recursive schema, see delta above)
* }
* ]
* usage (Required): {
* completion_tokens: int (Required)
* prompt_tokens: int (Required)
* total_tokens: int (Required)
* }
* }
* }</pre>
*
* @param deploymentId deployment id of the deployed model.
* @param chatCompletionsOptions The configuration information for a chat completions request. Completions support a
* wide variety of tasks and generate text that continues from or "completes" provided prompt data.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return chat completions for the provided chat messages. Completions support a wide variety of tasks and generate
* text that continues from or "completes" provided prompt data along with {@link Response}.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BinaryData> getChatCompletionsWithResponse(
String deploymentId, BinaryData chatCompletionsOptions, RequestOptions requestOptions) {
return this.client.getChatCompletionsWithResponse(deploymentId, chatCompletionsOptions, requestOptions).block();
}
/**
* Return the embeddings for a given prompt.
*
* @param deploymentId deployment id of the deployed model.
* @param embeddingsOptions The configuration information for an embeddings request. Embeddings measure the
* relatedness of text strings and are commonly used for search, clustering, recommendations, and other similar
* scenarios.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return representation of the response data from an embeddings request. Embeddings measure the relatedness of
* text strings and are commonly used for search, clustering, recommendations, and other similar scenarios.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Embeddings getEmbeddings(String deploymentId, EmbeddingsOptions embeddingsOptions) {
RequestOptions requestOptions = new RequestOptions();
return getEmbeddingsWithResponse(deploymentId, BinaryData.fromObject(embeddingsOptions), requestOptions)
.getValue()
.toObject(Embeddings.class);
}
/**
* Gets completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*
* @param deploymentId deployment id of the deployed model.
* @param completionsOptions The configuration information for a completions request. Completions support a wide
* variety of tasks and generate text that continues from or "completes" provided prompt data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public Completions getCompletions(String deploymentId, CompletionsOptions completionsOptions) {
RequestOptions requestOptions = new RequestOptions();
return getCompletionsWithResponse(deploymentId, BinaryData.fromObject(completionsOptions), requestOptions)
.getValue()
.toObject(Completions.class);
}
/**
* Gets completions for the provided input prompt. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*
* @param deploymentId deployment id of the deployed model.
* @param prompt The prompt to generate completion text from.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return completions for the provided input prompts. Completions support a wide variety of tasks and generate text
* that continues from or "completes" provided prompt data.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
/**
* Gets completions as a stream for the provided input prompts. Completions support a wide variety of tasks and
* generate text that continues from or "completes" provided prompt data.
*
* @param deploymentId deployment id of the deployed model.
* @param completionsOptions The configuration information for a completions request. Completions support a wide
* variety of tasks and generate text that continues from or "completes" provided prompt data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return an {@link IterableStream} of completions for the provided input prompts. Completions support a wide
* variety of tasks and generate text that continues from or "completes" provided prompt data.
*/
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public IterableStream<Completions> getCompletionsStream(
            String deploymentId, CompletionsOptions completionsOptions) {
        RequestOptions requestOptions = new RequestOptions();
        // Raw response body exposed as a byte-buffer stream for SSE parsing.
        // NOTE(review): getCompletionsWithResponse blocks for the response first — confirm
        // whether events are actually delivered incrementally or buffered up front.
        // Assumes the caller enabled streaming on completionsOptions — TODO confirm.
        Flux<ByteBuffer> responseStream =
                getCompletionsWithResponse(deploymentId, BinaryData.fromObject(completionsOptions), requestOptions)
                        .getValue()
                        .toFluxByteBuffer();
        // Parse the server-sent-event payload into typed Completions objects.
        OpenAIServerSentEvents<Completions> completionsStream =
                new OpenAIServerSentEvents<>(responseStream, Completions.class);
        return new IterableStream<>(completionsStream.getEvents());
    }
/**
* Gets chat completions for the provided chat messages. Completions support a wide variety of tasks and generate
* text that continues from or "completes" provided prompt data.
*
* @param deploymentId deployment id of the deployed model.
* @param chatCompletionsOptions The configuration information for a chat completions request. Completions support a
* wide variety of tasks and generate text that continues from or "completes" provided prompt data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return chat completions for the provided chat messages. Completions support a wide variety of tasks and generate
* text that continues from or "completes" provided prompt data.
*/
@Generated
@ServiceMethod(returns = ReturnType.SINGLE)
public ChatCompletions getChatCompletions(String deploymentId, ChatCompletionsOptions chatCompletionsOptions) {
RequestOptions requestOptions = new RequestOptions();
return getChatCompletionsWithResponse(
deploymentId, BinaryData.fromObject(chatCompletionsOptions), requestOptions)
.getValue()
.toObject(ChatCompletions.class);
}
} |
To avoid reading the Kafka metric after producer is closed (not sure what the semantics is here) ```suggestion timeService.registerProcessingTimer( lastSync + METRIC_UPDATE_INTERVAL_MILLIS, (time) -> { if (!closed) { MetricUtil.sync(byteOutMetric, numBytesOutCounter); lastSync = time; registerMetricSync(); } }); ``` It might be easier to just use `System.currentTimeMillis()` instead of `lastSync` to not trigger stuff to often in case of overload? But not sure. | private void registerMetricSync() {
if (closed) {
return;
}
timeService.registerProcessingTimer(
lastSync + METRIC_UPDATE_INTERVAL_MILLIS,
(time) -> {
MetricUtil.sync(byteOutMetric, numBytesOutCounter);
lastSync = time;
registerMetricSync();
});
} | }); | private void registerMetricSync() {
timeService.registerProcessingTimer(
lastSync + METRIC_UPDATE_INTERVAL_MILLIS,
(time) -> {
if (closed) {
return;
}
MetricUtil.sync(byteOutMetric, numBytesOutCounter);
lastSync = time;
registerMetricSync();
});
} | class KafkaWriter<IN> implements SinkWriter<IN, KafkaCommittable, KafkaWriterState> {
    private static final Logger LOG = LoggerFactory.getLogger(KafkaWriter.class);
    // Producer config key that, when "true", suppresses registration of Kafka metrics.
    private static final String KEY_DISABLE_METRICS = "flink.disable-metrics";
    private static final String KAFKA_PRODUCER_METRIC_NAME = "KafkaProducer";
    private static final long METRIC_UPDATE_INTERVAL_MILLIS = 500;
    private final DeliveryGuarantee deliveryGuarantee;
    private final Properties kafkaProducerConfig;
    private final String transactionalIdPrefix;
    private final KafkaRecordSerializationSchema<IN> recordSerializer;
    // Invoked by the producer for every send; records the first failure and
    // decrements the pending-record counter.
    private final Callback deliveryCallback;
    // Records sent but not yet acknowledged via the delivery callback.
    private final AtomicLong pendingRecords = new AtomicLong();
    private final KafkaRecordSerializationSchema.KafkaSinkContext kafkaSinkContext;
    // Producers whose transactions are awaiting commit (see prepareCommit/commit).
    private final List<FlinkKafkaInternalProducer<byte[], byte[]>> producers = new ArrayList<>();
    // Wrappers reused across producer re-creations so each gauge is registered once per name.
    private final Map<String, KafkaMetricMutableWrapper> previouslyCreatedMetrics = new HashMap<>();
    private final SinkWriterMetricGroup metricGroup;
    private final Counter numBytesOutCounter;
    private final Sink.ProcessingTimeService timeService;
    // Kafka's "outgoing-byte-total" metric of the current producer.
    private transient Metric byteOutMetric;
    private transient FlinkKafkaInternalProducer<byte[], byte[]> currentProducer;
    private transient KafkaWriterState kafkaWriterState;
    // First exception reported by the async send callback; rethrown by checkErroneous().
    @Nullable private transient volatile Exception producerAsyncException;
    private boolean closed = false;
    // Timestamp of the last metric sync; drives the periodic metric-update timer.
    private long lastSync = System.currentTimeMillis();
    /**
     * Constructor creating a Kafka writer.
     *
     * <p>It will throw a {@link RuntimeException} if {@link
     * KafkaRecordSerializationSchema#open(SerializationSchema.InitializationContext,
     * KafkaRecordSerializationSchema.KafkaSinkContext)} fails.
     *
     * @param deliveryGuarantee the Sink's delivery guarantee
     * @param kafkaProducerConfig the properties to configure the {@link FlinkKafkaInternalProducer}
     * @param transactionalIdPrefix used to create the transactionalIds
     * @param sinkInitContext context to provide information about the runtime environment
     * @param recordSerializer serialize to transform the incoming records to {@link ProducerRecord}
     * @param schemaContext context used to initialize the {@link KafkaRecordSerializationSchema}
     * @param recoveredStates state from a previous execution which was recovered
     */
    KafkaWriter(
            DeliveryGuarantee deliveryGuarantee,
            Properties kafkaProducerConfig,
            String transactionalIdPrefix,
            Sink.InitContext sinkInitContext,
            KafkaRecordSerializationSchema<IN> recordSerializer,
            SerializationSchema.InitializationContext schemaContext,
            List<KafkaWriterState> recoveredStates) {
        this.deliveryGuarantee = checkNotNull(deliveryGuarantee, "deliveryGuarantee");
        this.kafkaProducerConfig = checkNotNull(kafkaProducerConfig, "kafkaProducerConfig");
        this.transactionalIdPrefix = checkNotNull(transactionalIdPrefix, "transactionalIdPrefix");
        this.recordSerializer = checkNotNull(recordSerializer, "recordSerializer");
        // Record only the first async failure; always acknowledge so the
        // pending-record counter stays balanced.
        this.deliveryCallback =
                (metadata, exception) -> {
                    if (exception != null && producerAsyncException == null) {
                        producerAsyncException = exception;
                    }
                    acknowledgeMessage();
                };
        checkNotNull(sinkInitContext, "sinkInitContext");
        this.timeService = sinkInitContext.getProcessingTimeService();
        this.metricGroup = sinkInitContext.metricGroup();
        this.numBytesOutCounter = metricGroup.getIOMetricGroup().getNumBytesOutCounter();
        this.kafkaSinkContext =
                new DefaultKafkaSinkContext(
                        sinkInitContext.getSubtaskId(),
                        sinkInitContext.getNumberOfParallelSubtasks(),
                        kafkaProducerConfig);
        try {
            recordSerializer.open(schemaContext, kafkaSinkContext);
        } catch (Exception e) {
            throw new FlinkRuntimeException("Cannot initialize schema.", e);
        }
        // Recover transactional-id bookkeeping and abort lingering transactions
        // before opening the first producer/transaction.
        this.kafkaWriterState =
                recoverAndInitializeState(checkNotNull(recoveredStates, "recoveredStates"));
        this.currentProducer = beginTransaction();
        producers.add(currentProducer);
        registerMetricSync();
    }
    @Override
    public void write(IN element, Context context) throws IOException {
        // Surface any failure reported asynchronously by a previous send first.
        checkErroneous();
        final ProducerRecord<byte[], byte[]> record =
                recordSerializer.serialize(element, kafkaSinkContext, context.timestamp());
        // Increment before send so the callback's decrement can never run first.
        pendingRecords.incrementAndGet();
        currentProducer.send(record, deliveryCallback);
    }
    @Override
    public List<KafkaCommittable> prepareCommit(boolean flush) throws IOException {
        // Push all outstanding records to Kafka (and, for at-least-once /
        // exactly-once, verify that none remain pending).
        flushRecords(flush);
        // Unless this is the final flush, roll over to the next transaction.
        // commit() below still sees the producers accumulated so far; the new
        // currentProducer is only added to the list afterwards.
        if (!flush) {
            currentProducer = beginTransaction();
        }
        final List<KafkaCommittable> committables = commit();
        producers.add(currentProducer);
        return committables;
    }
    @Override
    public List<KafkaWriterState> snapshotState() throws IOException {
        // Single-element state: the transactional-id bookkeeping for this subtask.
        return ImmutableList.of(kafkaWriterState);
    }
    @Override
    public void close() throws Exception {
        // Stops the metric-sync timer from re-registering itself after shutdown.
        closed = true;
        currentProducer.close(Duration.ZERO);
        // NOTE(review): producers accumulated in 'producers' (pending commit) are not
        // closed here — confirm their lifecycle is owned by the committer.
    }
    /**
     * Derives this subtask's writer state from the recovered states and aborts
     * lingering Kafka transactions identified by the transaction log helper.
     */
    private KafkaWriterState recoverAndInitializeState(List<KafkaWriterState> recoveredStates) {
        final int subtaskId = kafkaSinkContext.getParallelInstanceId();
        if (recoveredStates.isEmpty()) {
            // Fresh start: begin at transactional-id offset 0.
            final KafkaWriterState state =
                    new KafkaWriterState(transactionalIdPrefix, subtaskId, 0);
            abortTransactions(getTransactionsToAbort(state, new ArrayList<>()));
            return state;
        }
        // Index the recovered states by subtask id; this subtask's own state must be present.
        final Map<Integer, KafkaWriterState> taskOffsetMapping =
                recoveredStates.stream()
                        .collect(
                                Collectors.toMap(
                                        KafkaWriterState::getSubtaskId, Function.identity()));
        checkState(
                taskOffsetMapping.containsKey(subtaskId),
                "Internal error: It is expected that state from previous executions is distributed to the same subtask id.");
        final KafkaWriterState lastState = taskOffsetMapping.get(subtaskId);
        taskOffsetMapping.remove(subtaskId);
        // Abort open transactions belonging to this subtask's previous run and to
        // states redistributed from other (removed) subtasks.
        abortTransactions(
                getTransactionsToAbort(lastState, new ArrayList<>(taskOffsetMapping.values())));
        if (!lastState.getTransactionalIdPrefix().equals(transactionalIdPrefix)) {
            // Prefix changed: restart the offset sequence under the new prefix.
            LOG.warn(
                    "Transactional id prefix from previous execution {} has changed to {}.",
                    lastState.getTransactionalIdPrefix(),
                    transactionalIdPrefix);
            return new KafkaWriterState(transactionalIdPrefix, subtaskId, 0);
        }
        return new KafkaWriterState(
                transactionalIdPrefix, subtaskId, lastState.getTransactionalIdOffset());
    }
private void abortTransactions(List<String> transactionsToAbort) {
transactionsToAbort.forEach(
transaction -> {
final Properties myConfig = new Properties();
myConfig.putAll(kafkaProducerConfig);
myConfig.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transaction);
LOG.info("Aborting Kafka transaction {}.", transaction);
FlinkKafkaInternalProducer<byte[], byte[]> kafkaProducer = null;
try {
kafkaProducer = new FlinkKafkaInternalProducer<>(myConfig);
kafkaProducer.initTransactions();
} finally {
if (kafkaProducer != null) {
kafkaProducer.close(Duration.ofSeconds(0));
}
}
});
}
private void acknowledgeMessage() {
pendingRecords.decrementAndGet();
}
private void checkErroneous() {
Exception e = producerAsyncException;
if (e != null) {
producerAsyncException = null;
throw new RuntimeException("Failed to send data to Kafka: " + e.getMessage(), e);
}
}
    /**
     * Returns the producer for the next batch of records. For exactly-once a
     * fresh transactional producer is created per call and a transaction is
     * begun; otherwise a single non-transactional producer is created lazily
     * and reused.
     */
    private FlinkKafkaInternalProducer<byte[], byte[]> beginTransaction() {
        switch (deliveryGuarantee) {
            case EXACTLY_ONCE:
                // Each checkpoint gets its own transactional producer; dispose the old one.
                if (currentProducer != null) {
                    currentProducer.close(Duration.ZERO);
                }
                final FlinkKafkaInternalProducer<byte[], byte[]> transactionalProducer =
                        createTransactionalProducer();
                initMetrics(transactionalProducer);
                transactionalProducer.beginTransaction();
                return transactionalProducer;
            case AT_LEAST_ONCE:
            case NONE:
                // Lazily create one plain producer on first use, then reuse it.
                if (currentProducer == null) {
                    final FlinkKafkaInternalProducer<byte[], byte[]> producer =
                            new FlinkKafkaInternalProducer<>(kafkaProducerConfig);
                    initMetrics(producer);
                    return producer;
                }
                LOG.debug("Reusing existing KafkaProducer");
                return currentProducer;
            default:
                throw new UnsupportedOperationException(
                        "Unsupported Kafka writer semantic " + deliveryGuarantee);
        }
    }
    /**
     * Flushes buffered records according to the delivery guarantee and rethrows
     * any asynchronous send failure afterwards.
     */
    private void flushRecords(boolean finalFlush) {
        switch (deliveryGuarantee) {
            case EXACTLY_ONCE:
            case AT_LEAST_ONCE:
                currentProducer.flush();
                // After a flush every send callback must have fired.
                final long pendingRecordsCount = pendingRecords.get();
                if (pendingRecordsCount != 0) {
                    throw new IllegalStateException(
                            "Pending record count must be zero at this point: "
                                    + pendingRecordsCount);
                }
                break;
            case NONE:
                // Best-effort mode: only force a flush on the final call.
                if (finalFlush) {
                    currentProducer.flush();
                }
                break;
            default:
                throw new UnsupportedOperationException(
                        "Unsupported Kafka writer semantic " + deliveryGuarantee);
        }
        checkErroneous();
    }
private List<KafkaCommittable> commit() {
final List<KafkaCommittable> committables;
switch (deliveryGuarantee) {
case EXACTLY_ONCE:
committables =
producers.stream().map(KafkaCommittable::of).collect(Collectors.toList());
producers.clear();
break;
case AT_LEAST_ONCE:
case NONE:
committables = new ArrayList<>();
break;
default:
throw new UnsupportedOperationException(
"Unsupported Kafka writer semantic " + deliveryGuarantee);
}
LOG.info("Committing {} committables.", committables);
return committables;
}
    /**
     * For each checkpoint we create new {@link FlinkKafkaInternalProducer} so that new transactions
     * will not clash with transactions created during previous checkpoints ({@code
     * producer.initTransactions()} assures that we obtain new producerId and epoch counters).
     */
    private FlinkKafkaInternalProducer<byte[], byte[]> createTransactionalProducer() {
        // Advance the per-subtask offset so the new transactional id is unique.
        final long transactionalIdOffset = kafkaWriterState.getTransactionalIdOffset() + 1;
        final Properties copiedProducerConfig = new Properties();
        copiedProducerConfig.putAll(kafkaProducerConfig);
        initTransactionalProducerConfig(
                copiedProducerConfig,
                transactionalIdOffset,
                transactionalIdPrefix,
                kafkaSinkContext.getParallelInstanceId());
        final FlinkKafkaInternalProducer<byte[], byte[]> producer =
                new FlinkKafkaInternalProducer<>(copiedProducerConfig);
        producer.initTransactions();
        // Persist the consumed offset so it survives checkpoints.
        kafkaWriterState =
                new KafkaWriterState(
                        transactionalIdPrefix,
                        kafkaSinkContext.getParallelInstanceId(),
                        transactionalIdOffset);
        LOG.info(
                "Created new transactional producer {}",
                copiedProducerConfig.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG));
        return producer;
    }
private static void initTransactionalProducerConfig(
Properties producerConfig,
long transactionalIdOffset,
String transactionalIdPrefix,
int subtaskId) {
producerConfig.put(
ProducerConfig.TRANSACTIONAL_ID_CONFIG,
TransactionalIdFactory.buildTransactionalId(
transactionalIdPrefix, subtaskId, transactionalIdOffset));
}
    /**
     * Wires the given producer's Kafka metrics into the Flink metric group:
     * caches the byte-out metric, exposes a current-send-time gauge, and —
     * unless disabled via the "flink.disable-metrics" producer config key —
     * mirrors all producer metrics as gauges.
     */
    private void initMetrics(FlinkKafkaInternalProducer<byte[], byte[]> producer) {
        byteOutMetric =
                MetricUtil.getKafkaMetric(
                        producer.metrics(), "producer-metrics", "outgoing-byte-total");
        metricGroup.setCurrentSendTimeGauge(() -> computeSendTime(producer));
        // Opt-out: skip mirroring the full metric set when explicitly disabled.
        if (producer.getKafkaProducerConfig().containsKey(KEY_DISABLE_METRICS)
                && Boolean.parseBoolean(
                        producer.getKafkaProducerConfig().get(KEY_DISABLE_METRICS).toString())) {
            return;
        }
        final MetricGroup kafkaMetricGroup = metricGroup.addGroup(KAFKA_PRODUCER_METRIC_NAME);
        producer.metrics().entrySet().forEach(initMetric(kafkaMetricGroup));
    }
private Consumer<Map.Entry<MetricName, ? extends Metric>> initMetric(
MetricGroup kafkaMetricGroup) {
return (entry) -> {
final String name = entry.getKey().name();
final Metric metric = entry.getValue();
if (previouslyCreatedMetrics.containsKey(name)) {
final KafkaMetricMutableWrapper wrapper = previouslyCreatedMetrics.get(name);
wrapper.setKafkaMetric(metric);
} else {
final KafkaMetricMutableWrapper wrapper = new KafkaMetricMutableWrapper(metric);
previouslyCreatedMetrics.put(name, wrapper);
kafkaMetricGroup.gauge(name, wrapper);
}
};
}
    /**
     * Queries the Kafka transaction log for transactional ids that should be
     * aborted for this writer. Best-effort: if the log cannot be read (e.g. no
     * access to the __transaction_state topic) an empty list is returned and a
     * warning is logged.
     */
    private List<String> getTransactionsToAbort(
            KafkaWriterState main, List<KafkaWriterState> others) {
        try (final KafkaTransactionLog log =
                new KafkaTransactionLog(
                        kafkaProducerConfig,
                        main,
                        others,
                        kafkaSinkContext.getNumberOfParallelInstances())) {
            return log.getTransactionsToAbort();
        } catch (KafkaException e) {
            // Deliberate best-effort fallback: lingering transactions must then
            // be aborted manually by the operator.
            LOG.warn(
                    "Cannot abort transactions before startup e.g. the job has no access to the "
                            + "__transaction_state topic. Lingering transactions may hold new "
                            + "data back from downstream consumers. Please abort these "
                            + "transactions manually.",
                    e);
            return Collections.emptyList();
        }
    }
private static long computeSendTime(Producer<?, ?> producer) {
final Metric sendTime =
MetricUtil.getKafkaMetric(
producer.metrics(), "producer-metrics", "request-latency-avg");
final Metric queueTime =
MetricUtil.getKafkaMetric(
producer.metrics(), "producer-metrics", "record-queue-time-avg");
return ((Number) sendTime.metricValue()).longValue()
+ ((Number) queueTime.metricValue()).longValue();
}
} | class KafkaWriter<IN> implements SinkWriter<IN, KafkaCommittable, KafkaWriterState> {
private static final Logger LOG = LoggerFactory.getLogger(KafkaWriter.class);
private static final String KEY_DISABLE_METRICS = "flink.disable-metrics";
private static final String KAFKA_PRODUCER_METRIC_NAME = "KafkaProducer";
private static final long METRIC_UPDATE_INTERVAL_MILLIS = 500;
private final DeliveryGuarantee deliveryGuarantee;
private final Properties kafkaProducerConfig;
private final String transactionalIdPrefix;
private final KafkaRecordSerializationSchema<IN> recordSerializer;
private final Callback deliveryCallback;
private final AtomicLong pendingRecords = new AtomicLong();
private final KafkaRecordSerializationSchema.KafkaSinkContext kafkaSinkContext;
private final List<FlinkKafkaInternalProducer<byte[], byte[]>> producers = new ArrayList<>();
private final Map<String, KafkaMetricMutableWrapper> previouslyCreatedMetrics = new HashMap<>();
private final SinkWriterMetricGroup metricGroup;
private final Counter numBytesOutCounter;
private final Sink.ProcessingTimeService timeService;
private transient Metric byteOutMetric;
private transient FlinkKafkaInternalProducer<byte[], byte[]> currentProducer;
private transient KafkaWriterState kafkaWriterState;
@Nullable private transient volatile Exception producerAsyncException;
private boolean closed = false;
private long lastSync = System.currentTimeMillis();
/**
* Constructor creating a kafka writer.
*
* <p>It will throw a {@link RuntimeException} if {@link
* KafkaRecordSerializationSchema
* KafkaRecordSerializationSchema.KafkaSinkContext)} fails.
*
* @param deliveryGuarantee the Sink's delivery guarantee
* @param kafkaProducerConfig the properties to configure the {@link FlinkKafkaInternalProducer}
* @param transactionalIdPrefix used to create the transactionalIds
* @param sinkInitContext context to provide information about the runtime environment
* @param recordSerializer serialize to transform the incoming records to {@link ProducerRecord}
* @param schemaContext context used to initialize the {@link KafkaRecordSerializationSchema}
     * @param recoveredStates state from a previous execution which was recovered
*/
KafkaWriter(
DeliveryGuarantee deliveryGuarantee,
Properties kafkaProducerConfig,
String transactionalIdPrefix,
Sink.InitContext sinkInitContext,
KafkaRecordSerializationSchema<IN> recordSerializer,
SerializationSchema.InitializationContext schemaContext,
List<KafkaWriterState> recoveredStates) {
this.deliveryGuarantee = checkNotNull(deliveryGuarantee, "deliveryGuarantee");
this.kafkaProducerConfig = checkNotNull(kafkaProducerConfig, "kafkaProducerConfig");
this.transactionalIdPrefix = checkNotNull(transactionalIdPrefix, "transactionalIdPrefix");
this.recordSerializer = checkNotNull(recordSerializer, "recordSerializer");
this.deliveryCallback =
(metadata, exception) -> {
if (exception != null && producerAsyncException == null) {
producerAsyncException = exception;
}
acknowledgeMessage();
};
checkNotNull(sinkInitContext, "sinkInitContext");
this.timeService = sinkInitContext.getProcessingTimeService();
this.metricGroup = sinkInitContext.metricGroup();
this.numBytesOutCounter = metricGroup.getIOMetricGroup().getNumBytesOutCounter();
this.kafkaSinkContext =
new DefaultKafkaSinkContext(
sinkInitContext.getSubtaskId(),
sinkInitContext.getNumberOfParallelSubtasks(),
kafkaProducerConfig);
try {
recordSerializer.open(schemaContext, kafkaSinkContext);
} catch (Exception e) {
throw new FlinkRuntimeException("Cannot initialize schema.", e);
}
this.kafkaWriterState =
recoverAndInitializeState(checkNotNull(recoveredStates, "recoveredStates"));
this.currentProducer = beginTransaction();
producers.add(currentProducer);
registerMetricSync();
}
@Override
public void write(IN element, Context context) throws IOException {
checkErroneous();
final ProducerRecord<byte[], byte[]> record =
recordSerializer.serialize(element, kafkaSinkContext, context.timestamp());
pendingRecords.incrementAndGet();
currentProducer.send(record, deliveryCallback);
}
@Override
public List<KafkaCommittable> prepareCommit(boolean flush) throws IOException {
flushRecords(flush);
if (!flush) {
currentProducer = beginTransaction();
}
final List<KafkaCommittable> committables = commit();
producers.add(currentProducer);
return committables;
}
@Override
public List<KafkaWriterState> snapshotState() throws IOException {
return ImmutableList.of(kafkaWriterState);
}
@Override
public void close() throws Exception {
closed = true;
currentProducer.close(Duration.ZERO);
}
private KafkaWriterState recoverAndInitializeState(List<KafkaWriterState> recoveredStates) {
final int subtaskId = kafkaSinkContext.getParallelInstanceId();
if (recoveredStates.isEmpty()) {
final KafkaWriterState state =
new KafkaWriterState(transactionalIdPrefix, subtaskId, 0);
abortTransactions(getTransactionsToAbort(state, new ArrayList<>()));
return state;
}
final Map<Integer, KafkaWriterState> taskOffsetMapping =
recoveredStates.stream()
.collect(
Collectors.toMap(
KafkaWriterState::getSubtaskId, Function.identity()));
checkState(
taskOffsetMapping.containsKey(subtaskId),
"Internal error: It is expected that state from previous executions is distributed to the same subtask id.");
final KafkaWriterState lastState = taskOffsetMapping.get(subtaskId);
taskOffsetMapping.remove(subtaskId);
abortTransactions(
getTransactionsToAbort(lastState, new ArrayList<>(taskOffsetMapping.values())));
if (!lastState.getTransactionalIdPrefix().equals(transactionalIdPrefix)) {
LOG.warn(
"Transactional id prefix from previous execution {} has changed to {}.",
lastState.getTransactionalIdPrefix(),
transactionalIdPrefix);
return new KafkaWriterState(transactionalIdPrefix, subtaskId, 0);
}
return new KafkaWriterState(
transactionalIdPrefix, subtaskId, lastState.getTransactionalIdOffset());
}
private void abortTransactions(List<String> transactionsToAbort) {
transactionsToAbort.forEach(
transaction -> {
final Properties myConfig = new Properties();
myConfig.putAll(kafkaProducerConfig);
myConfig.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transaction);
LOG.info("Aborting Kafka transaction {}.", transaction);
FlinkKafkaInternalProducer<byte[], byte[]> kafkaProducer = null;
try {
kafkaProducer = new FlinkKafkaInternalProducer<>(myConfig);
kafkaProducer.initTransactions();
} finally {
if (kafkaProducer != null) {
kafkaProducer.close(Duration.ofSeconds(0));
}
}
});
}
private void acknowledgeMessage() {
pendingRecords.decrementAndGet();
}
private void checkErroneous() {
Exception e = producerAsyncException;
if (e != null) {
producerAsyncException = null;
throw new RuntimeException("Failed to send data to Kafka: " + e.getMessage(), e);
}
}
private FlinkKafkaInternalProducer<byte[], byte[]> beginTransaction() {
switch (deliveryGuarantee) {
case EXACTLY_ONCE:
if (currentProducer != null) {
currentProducer.close(Duration.ZERO);
}
final FlinkKafkaInternalProducer<byte[], byte[]> transactionalProducer =
createTransactionalProducer();
initMetrics(transactionalProducer);
transactionalProducer.beginTransaction();
return transactionalProducer;
case AT_LEAST_ONCE:
case NONE:
if (currentProducer == null) {
final FlinkKafkaInternalProducer<byte[], byte[]> producer =
new FlinkKafkaInternalProducer<>(kafkaProducerConfig);
initMetrics(producer);
return producer;
}
LOG.debug("Reusing existing KafkaProducer");
return currentProducer;
default:
throw new UnsupportedOperationException(
"Unsupported Kafka writer semantic " + deliveryGuarantee);
}
}
private void flushRecords(boolean finalFlush) {
switch (deliveryGuarantee) {
case EXACTLY_ONCE:
case AT_LEAST_ONCE:
currentProducer.flush();
final long pendingRecordsCount = pendingRecords.get();
if (pendingRecordsCount != 0) {
throw new IllegalStateException(
"Pending record count must be zero at this point: "
+ pendingRecordsCount);
}
break;
case NONE:
if (finalFlush) {
currentProducer.flush();
}
break;
default:
throw new UnsupportedOperationException(
"Unsupported Kafka writer semantic " + deliveryGuarantee);
}
checkErroneous();
}
private List<KafkaCommittable> commit() {
final List<KafkaCommittable> committables;
switch (deliveryGuarantee) {
case EXACTLY_ONCE:
committables =
producers.stream().map(KafkaCommittable::of).collect(Collectors.toList());
producers.clear();
break;
case AT_LEAST_ONCE:
case NONE:
committables = new ArrayList<>();
break;
default:
throw new UnsupportedOperationException(
"Unsupported Kafka writer semantic " + deliveryGuarantee);
}
LOG.info("Committing {} committables.", committables);
return committables;
}
/**
 * For each checkpoint we create new {@link FlinkKafkaInternalProducer} so that new transactions
 * will not clash with transactions created during previous checkpoints ({@code
 * producer.initTransactions()} assures that we obtain new producerId and epoch counters).
 */
private FlinkKafkaInternalProducer<byte[], byte[]> createTransactionalProducer() {
    // Advance the offset so the new transactional.id differs from all previously used ones.
    final long transactionalIdOffset = kafkaWriterState.getTransactionalIdOffset() + 1;
    // Work on a copy so the shared base config never carries a transactional.id.
    final Properties copiedProducerConfig = new Properties();
    copiedProducerConfig.putAll(kafkaProducerConfig);
    initTransactionalProducerConfig(
            copiedProducerConfig,
            transactionalIdOffset,
            transactionalIdPrefix,
            kafkaSinkContext.getParallelInstanceId());
    final FlinkKafkaInternalProducer<byte[], byte[]> producer =
            new FlinkKafkaInternalProducer<>(copiedProducerConfig);
    // Obtains a fresh producerId/epoch, fencing off producers from earlier checkpoints.
    producer.initTransactions();
    // Persist the consumed offset in the writer state so it is never reused after failover.
    kafkaWriterState =
            new KafkaWriterState(
                    transactionalIdPrefix,
                    kafkaSinkContext.getParallelInstanceId(),
                    transactionalIdOffset);
    LOG.info(
            "Created new transactional producer {}",
            copiedProducerConfig.get(ProducerConfig.TRANSACTIONAL_ID_CONFIG));
    return producer;
}
/**
 * Installs a deterministic {@code transactional.id} — derived from the prefix, the subtask id
 * and the per-checkpoint offset — into the given producer configuration.
 */
private static void initTransactionalProducerConfig(
        Properties producerConfig,
        long transactionalIdOffset,
        String transactionalIdPrefix,
        int subtaskId) {
    final String transactionalId =
            TransactionalIdFactory.buildTransactionalId(
                    transactionalIdPrefix, subtaskId, transactionalIdOffset);
    producerConfig.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transactionalId);
}
/**
 * Registers writer metrics for the given producer: the outgoing-byte counter, the
 * current-send-time gauge and — unless disabled via the {@code KEY_DISABLE_METRICS} config
 * entry — one gauge per Kafka producer metric.
 */
private void initMetrics(FlinkKafkaInternalProducer<byte[], byte[]> producer) {
    byteOutMetric =
            MetricUtil.getKafkaMetric(
                    producer.metrics(), "producer-metrics", "outgoing-byte-total");
    metricGroup.setCurrentSendTimeGauge(() -> computeSendTime(producer));
    // Skip per-metric registration when the user explicitly disabled metric forwarding.
    if (producer.getKafkaProducerConfig().containsKey(KEY_DISABLE_METRICS)
            && Boolean.parseBoolean(
                    producer.getKafkaProducerConfig().get(KEY_DISABLE_METRICS).toString())) {
        return;
    }
    final MetricGroup kafkaMetricGroup = metricGroup.addGroup(KAFKA_PRODUCER_METRIC_NAME);
    producer.metrics().entrySet().forEach(initMetric(kafkaMetricGroup));
}
/**
 * Returns a consumer that mirrors one Kafka metric into the Flink metric group.
 *
 * <p>When a metric name has been seen before, the existing wrapper's backing metric is swapped
 * in place instead of registering a second gauge under the same name — presumably to keep the
 * gauges stable across producer re-creation (TODO confirm against MetricGroup semantics).
 */
private Consumer<Map.Entry<MetricName, ? extends Metric>> initMetric(
        MetricGroup kafkaMetricGroup) {
    return (entry) -> {
        final String name = entry.getKey().name();
        final Metric metric = entry.getValue();
        // Single map lookup instead of the original containsKey + get pair.
        final KafkaMetricMutableWrapper existingWrapper = previouslyCreatedMetrics.get(name);
        if (existingWrapper != null) {
            existingWrapper.setKafkaMetric(metric);
        } else {
            final KafkaMetricMutableWrapper wrapper = new KafkaMetricMutableWrapper(metric);
            previouslyCreatedMetrics.put(name, wrapper);
            kafkaMetricGroup.gauge(name, wrapper);
        }
    };
}
/**
 * Queries the Kafka transaction log for lingering transactions that must be aborted before
 * this writer starts producing.
 *
 * <p>Best effort: if the log cannot be read (e.g. the job has no access to the
 * {@code __transaction_state} topic) a warning is logged and an empty list is returned.
 */
private List<String> getTransactionsToAbort(
        KafkaWriterState main, List<KafkaWriterState> others) {
    try (final KafkaTransactionLog log =
            new KafkaTransactionLog(
                    kafkaProducerConfig,
                    main,
                    others,
                    kafkaSinkContext.getNumberOfParallelInstances())) {
        return log.getTransactionsToAbort();
    } catch (KafkaException e) {
        LOG.warn(
                "Cannot abort transactions before startup e.g. the job has no access to the "
                        + "__transaction_state topic. Lingering transactions may hold new "
                        + "data back from downstream consumers. Please abort these "
                        + "transactions manually.",
                e);
        return Collections.emptyList();
    }
}
/**
 * Computes the average time a record spends between send and broker acknowledgement:
 * average queueing time plus average request latency, both read from the producer's metrics.
 */
private static long computeSendTime(Producer<?, ?> producer) {
    final Metric requestLatencyAvg =
            MetricUtil.getKafkaMetric(
                    producer.metrics(), "producer-metrics", "request-latency-avg");
    final Metric recordQueueTimeAvg =
            MetricUtil.getKafkaMetric(
                    producer.metrics(), "producer-metrics", "record-queue-time-avg");
    final long latency = ((Number) requestLatencyAvg.metricValue()).longValue();
    final long queueTime = ((Number) recordQueueTimeAvg.metricValue()).longValue();
    return latency + queueTime;
}
} |
```suggestion // This header is added to block content sniffing in the old browsers where // the response payload may contain executable scripts // Related issue: ballerina-platform/ballerina-standard-library/issues/5088 response.setHeader(X_CONTENT_TYPE_OPTIONS, NO_SNIFF); ``` | public static HttpCarbonMessage createErrorMessage(String payload, int statusCode) {
HttpCarbonMessage response = HttpUtil.createHttpCarbonMessage(false);
response.waitAndReleaseAllEntities();
if (payload != null) {
payload = lowerCaseTheFirstLetter(payload);
response.addHttpContent(new DefaultLastHttpContent(Unpooled.wrappedBuffer(payload.getBytes())));
response.setHeader(X_CONTENT_TYPE_OPTIONS, NO_SNIFF);
} else {
response.addHttpContent(new DefaultLastHttpContent());
}
setHttpStatusCodes(statusCode, response);
return response;
} | response.setHeader(X_CONTENT_TYPE_OPTIONS, NO_SNIFF); | public static HttpCarbonMessage createErrorMessage(String payload, int statusCode) {
HttpCarbonMessage response = HttpUtil.createHttpCarbonMessage(false);
response.waitAndReleaseAllEntities();
if (payload != null) {
payload = lowerCaseTheFirstLetter(payload);
response.addHttpContent(new DefaultLastHttpContent(Unpooled.wrappedBuffer(payload.getBytes())));
response.setHeader(X_CONTENT_TYPE_OPTIONS, NO_SNIFF);
} else {
response.addHttpContent(new DefaultLastHttpContent());
}
setHttpStatusCodes(statusCode, response);
return response;
} | class HttpUtil {
public static final boolean TRUE = true;
public static final boolean FALSE = false;
private static final Logger log = LoggerFactory.getLogger(HttpUtil.class);
private static final String METHOD_ACCESSED = "isMethodAccessed";
private static final String IO_EXCEPTION_OCCURRED = "I/O exception occurred";
private static final String CHUNKING_CONFIG = "chunking_config";
/**
 * Set new entity to in/out request/response struct.
 *
 * @param httpMessageStruct request/response struct.
 * @return created entity.
 */
public static ObjectValue createNewEntity(ObjectValue httpMessageStruct) {
    ObjectValue entity = ValueCreatorUtils.createEntityObject();
    HttpCarbonMessage httpCarbonMessage = HttpUtil.getCarbonMsg(httpMessageStruct,
            HttpUtil.createHttpCarbonMessage(isRequest(httpMessageStruct)));
    // Share the transport header/trailer maps with the entity so both views stay in sync.
    entity.addNativeData(ENTITY_HEADERS, httpCarbonMessage.getHeaders());
    entity.addNativeData(ENTITY_TRAILER_HEADERS, httpCarbonMessage.getTrailerHeaders());
    entity.addNativeData(ENTITY_BYTE_CHANNEL, null);
    httpMessageStruct.set(isRequest(httpMessageStruct) ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD
            , entity);
    httpMessageStruct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
    return entity;
}

/**
 * Set the given entity to request or response message.
 *
 * @param messageObj Represent ballerina request/response
 * @param entityObj Represent an entity
 * @param isRequest boolean representing whether the message is a request or a response
 */
public static void setEntity(ObjectValue messageObj, ObjectValue entityObj, boolean isRequest) {
    HttpCarbonMessage httpCarbonMessage = HttpUtil.getCarbonMsg(messageObj,
            HttpUtil.createHttpCarbonMessage(isRequest));
    String contentType = MimeUtil.getContentTypeWithParameters(entityObj);
    if (checkEntityBodyAvailability(entityObj)) {
        // Drop any previously buffered content before attaching the new body; default the
        // content type to application/octet-stream when none is set.
        httpCarbonMessage.waitAndReleaseAllEntities();
        if (contentType == null) {
            contentType = OCTET_STREAM;
        }
        HeaderUtil.setHeaderToEntity(entityObj, HttpHeaderNames.CONTENT_TYPE.toString(), contentType);
    }
    messageObj.set(isRequest ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD, entityObj);
    messageObj.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, checkEntityBodyAvailability(entityObj));
}

/**
 * Get the entity from request or response.
 *
 * @param messageObj Ballerina context
 * @param isRequest boolean representing whether the message is a request or a response
 * @param entityBodyRequired boolean representing whether the entity body is required
 * @return Entity of the request or response
 */
public static ObjectValue getEntity(ObjectValue messageObj, boolean isRequest, boolean entityBodyRequired) {
    ObjectValue entity = (ObjectValue) messageObj.get(isRequest ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
    boolean byteChannelAlreadySet = false;
    if (messageObj.getNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET) != null) {
        byteChannelAlreadySet = (Boolean) messageObj.getNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET);
    }
    // The body is materialised lazily, and only once per message.
    if (entityBodyRequired && !byteChannelAlreadySet) {
        populateEntityBody(messageObj, entity, isRequest, false);
    }
    return entity;
}
/**
 * Populate entity with the relevant body content.
 *
 * @param messageObj Represent ballerina request/response
 * @param entityObj Represent an entity
 * @param request boolean representing whether the message is a request or a response
 * @param streaming boolean representing whether the entity requires byte channel or message as native data
 */
public static void populateEntityBody(ObjectValue messageObj, ObjectValue entityObj, boolean request,
                                      boolean streaming) {
    HttpCarbonMessage httpCarbonMessage = HttpUtil
            .getCarbonMsg(messageObj, HttpUtil.createHttpCarbonMessage(request));
    String contentType = httpCarbonMessage.getHeader(HttpHeaderNames.CONTENT_TYPE.toString());
    if (MimeUtil.isNotNullAndEmpty(contentType) && contentType.startsWith(MULTIPART_AS_PRIMARY_TYPE)
            && !streaming) {
        // Non-streaming multipart bodies are decoded eagerly into body parts.
        MultipartDecoder.parseBody(entityObj, contentType,
                new HttpMessageDataStreamer(httpCarbonMessage).getInputStream());
    } else {
        long contentLength = MimeUtil.extractContentLength(httpCarbonMessage);
        if (contentLength > 0) {
            if (streaming) {
                // Expose the body as a byte channel wrapped around the transport stream.
                entityObj.addNativeData(ENTITY_BYTE_CHANNEL, new EntityWrapper(
                        new EntityBodyChannel(new HttpMessageDataStreamer(httpCarbonMessage).getInputStream())));
            } else {
                entityObj.addNativeData(TRANSPORT_MESSAGE, httpCarbonMessage);
            }
        } else {
            // Without a content-length, a chunked message may still carry a body.
            if (HttpHeaderValues.CHUNKED.toString().equals(
                    httpCarbonMessage.getHeader(HttpHeaderNames.TRANSFER_ENCODING.toString()))) {
                entityObj.addNativeData(TRANSPORT_MESSAGE, httpCarbonMessage);
            }
        }
    }
    messageObj.set(request ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD, entityObj);
    messageObj.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, true);
}

/** Returns the entity only when its body has already been populated, otherwise {@code null}. */
public static ObjectValue extractEntity(ObjectValue request) {
    Object isEntityBodyAvailable = request.getNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET);
    if (isEntityBodyAvailable == null || !((Boolean) isEntityBodyAvailable)) {
        return null;
    }
    return (ObjectValue) request.get(isRequest(request) ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
}

/** Closes the given stream, logging (not propagating) any failure. */
public static void closeMessageOutputStream(OutputStream messageOutputStream) {
    try {
        if (messageOutputStream != null) {
            messageOutputStream.close();
        }
    } catch (IOException e) {
        log.error("Couldn't close message output stream", e);
    }
}
/**
 * Prepares the outbound response for the wire: ensures an entity exists, adds CORS headers,
 * copies headers/properties from the Ballerina response object onto the transport message and
 * applies the service's compression and chunking configuration.
 */
public static void prepareOutboundResponse(ObjectValue connectionObj, HttpCarbonMessage inboundRequestMsg,
                                           HttpCarbonMessage outboundResponseMsg,
                                           ObjectValue outboundResponseObj) {
    HttpUtil.checkEntityAvailability(outboundResponseObj);
    HttpUtil.addCorsHeaders(inboundRequestMsg, outboundResponseMsg);
    HttpUtil.enrichOutboundMessage(outboundResponseMsg, outboundResponseObj);
    HttpService httpService = (HttpService) connectionObj.getNativeData(HttpConstants.HTTP_SERVICE);
    HttpUtil.setCompressionHeaders(httpService.getCompressionConfig(), inboundRequestMsg, outboundResponseMsg);
    HttpUtil.setChunkingHeader(httpService.getChunkingConfig(), outboundResponseMsg);
}

// CORS processing only applies when the inbound request carries an Origin header.
private static void addCorsHeaders(HttpCarbonMessage requestMsg, HttpCarbonMessage responseMsg) {
    if (requestMsg.getHeader(HttpHeaderNames.ORIGIN.toString()) != null) {
        CorsHeaderGenerator.process(requestMsg, responseMsg, true);
    }
}

/**
 * This method should never be called directly to send out responses for ballerina HTTP 1.1. Use
 * PipeliningHandler's sendPipelinedResponse() method instead.
 *
 * @param requestMsg Represent the request message
 * @param responseMsg Represent the corresponding response
 * @return HttpResponseFuture that represent the future results
 */
public static HttpResponseFuture sendOutboundResponse(HttpCarbonMessage requestMsg,
                                                      HttpCarbonMessage responseMsg) {
    HttpResponseFuture responseFuture;
    try {
        responseFuture = requestMsg.respond(responseMsg);
    } catch (ServerConnectorException e) {
        throw new BallerinaConnectorException("Error occurred during response", e);
    }
    return responseFuture;
}

/**
 * Sends an HTTP/2 Server Push message back to the client.
 *
 * @param requestMsg the request message associated to the server push response
 * @param pushResponse the server push message
 * @param pushPromise the push promise associated with the server push
 * @return the future to get notifications of the operation asynchronously
 */
public static HttpResponseFuture pushResponse(HttpCarbonMessage requestMsg, HttpCarbonMessage pushResponse,
                                              Http2PushPromise pushPromise) {
    HttpResponseFuture responseFuture;
    try {
        responseFuture = requestMsg.pushResponse(pushResponse, pushPromise);
    } catch (ServerConnectorException e) {
        throw new BallerinaConnectorException("Error occurred while sending a server push message", e);
    }
    return responseFuture;
}

/**
 * Sends an HTTP/2 Push Promise message back to the client.
 *
 * @param requestMsg the request message associated to the push promise
 * @param pushPromise the push promise message
 * @return the future to get notifications of the operation asynchronously
 */
public static HttpResponseFuture pushPromise(HttpCarbonMessage requestMsg, Http2PushPromise pushPromise) {
    HttpResponseFuture responseFuture;
    try {
        responseFuture = requestMsg.pushPromise(pushPromise);
    } catch (ServerConnectorException e) {
        throw new BallerinaConnectorException("Error occurred during response", e);
    }
    return responseFuture;
}
/** Replies to the client with an error response derived from the given connector exception. */
public static void handleFailure(HttpCarbonMessage requestMessage, BallerinaConnectorException ex) {
    String errorMsg = ex.getMessage();
    int statusCode = getStatusCode(requestMessage, errorMsg);
    sendPipelinedResponse(requestMessage, createErrorMessage(errorMsg, statusCode));
}

/** Replies to the client with an error response derived from the given Ballerina error value. */
static void handleFailure(HttpCarbonMessage requestMessage, ErrorValue error) {
    String errorMsg = getErrorMessage(error);
    int statusCode = getStatusCode(requestMessage, errorMsg);
    ErrorHandlerUtils.printError("error: " + error.getPrintableStackTrace());
    sendPipelinedResponse(requestMessage, createErrorMessage(errorMsg, statusCode));
}

// Prefers the detail record's message field; falls back to the error reason when details are empty.
private static String getErrorMessage(ErrorValue error) {
    MapValue errorDetails = (MapValue) error.getDetails();
    if (!errorDetails.isEmpty()) {
        return errorDetails.get(HTTP_ERROR_MESSAGE).toString();
    }
    return error.getReason();
}

// Uses the status already set on the message; defaults to 500 (and logs the message) when absent.
private static int getStatusCode(HttpCarbonMessage requestMessage, String errorMsg) {
    Integer carbonStatusCode = requestMessage.getHttpStatusCode();
    if (carbonStatusCode == null) {
        log.error(errorMsg);
        return HttpResponseStatus.INTERNAL_SERVER_ERROR.code();
    }
    return carbonStatusCode;
}
/** Returns the payload with its first character lower-cased; empty input is returned unchanged. */
private static String lowerCaseTheFirstLetter(String payload) {
    if (payload.isEmpty()) {
        return payload;
    }
    return Character.toLowerCase(payload.charAt(0)) + payload.substring(1);
}
// Sets the status code and forces a text/plain content type on the error response.
private static void setHttpStatusCodes(int statusCode, HttpCarbonMessage response) {
    HttpHeaders httpHeaders = response.getHeaders();
    httpHeaders.set(HttpHeaderNames.CONTENT_TYPE, org.wso2.transport.http.netty.contract.Constants.TEXT_PLAIN);
    response.setHttpStatusCode(statusCode);
}
/**
 * Get HTTP error value with a given error detail.
 *
 * @param errMsg Error message
 * @return Error value
 */
public static ErrorValue getError(String errMsg) {
    MapValue<String, Object> httpErrorRecord = createHttpErrorDetailRecord(errMsg, null);
    httpErrorRecord.put(HTTP_ERROR_MESSAGE, errMsg);
    return BallerinaErrors.createError(HTTP_ERROR_CODE, httpErrorRecord);
}

/**
 * Get error value from throwable.
 *
 * @param throwable Throwable representing the error.
 * @return Error struct
 */
public static ErrorValue getError(Throwable throwable) {
    if (throwable instanceof ClientConnectorException) {
        return createHttpError(throwable);
    }
    if (throwable.getMessage() == null) {
        // Fall back to a generic message when the throwable carries none.
        return createHttpError(IO_EXCEPTION_OCCURRED);
    } else {
        return createHttpError(throwable.getMessage());
    }
}

// Maps the message text onto a specific HttpErrorType before building the error value.
public static ErrorValue createHttpError(String errorMessage) {
    HttpErrorType errorType = getErrorType(errorMessage);
    return createHttpError(errorMessage, errorType);
}

// Translates transport-level exceptions into typed HTTP errors, attaching an IO error cause
// where a lower-level error code is known.
public static ErrorValue createHttpError(Throwable throwable) {
    ErrorValue cause;
    if (throwable instanceof EndpointTimeOutException) {
        return createHttpError(throwable.getMessage(), HttpErrorType.IDLE_TIMEOUT_TRIGGERED);
    } else if (throwable instanceof SslException) {
        return createHttpError(throwable.getMessage(), HttpErrorType.SSL_ERROR);
    } else if (throwable instanceof PromiseRejectedException) {
        return createHttpError(throwable.getMessage(), HttpErrorType.HTTP2_CLIENT_ERROR);
    } else if (throwable instanceof ConnectionTimedOutException) {
        cause = createErrorCause(throwable.getMessage(),
                IOConstants.ErrorCode.ConnectionTimedOut.errorCode(),
                IO_PACKAGE_ID, DETAIL_RECORD_TYPE_NAME);
        return createHttpError("Something wrong with the connection", HttpErrorType.GENERIC_CLIENT_ERROR, cause);
    } else if (throwable instanceof ClientConnectorException) {
        cause = createErrorCause(throwable.getMessage(),
                IOConstants.ErrorCode.GenericError.errorCode(),
                IO_PACKAGE_ID, DETAIL_RECORD_TYPE_NAME);
        return createHttpError("Something wrong with the connection", HttpErrorType.GENERIC_CLIENT_ERROR, cause);
    } else {
        return createHttpError(throwable.getMessage(), HttpErrorType.GENERIC_CLIENT_ERROR);
    }
}

// Builds an error value whose detail record carries only the message.
public static ErrorValue createHttpError(String message, HttpErrorType errorType) {
    Map<String, Object> values = new HashMap<>();
    values.put(BallerinaErrors.ERROR_MESSAGE_FIELD, message);
    MapValue<String, Object> detail =
            BallerinaValues.createRecordValue(PROTOCOL_HTTP_PKG_ID, HTTP_ERROR_DETAIL_RECORD, values);
    return BallerinaErrors.createError(errorType.getReason(), detail);
}

// Builds an error value whose detail record carries the message plus a nested cause.
public static ErrorValue createHttpError(String message, HttpErrorType errorType, ErrorValue cause) {
    MapValue<String, Object> detailRecord = createHttpErrorDetailRecord(message, cause);
    return BallerinaErrors.createError(errorType.getReason(), detailRecord);
}

// Creates the HTTP error detail record, with or without a cause depending on the argument.
private static MapValue<String, Object> createHttpErrorDetailRecord(String message, ErrorValue cause) {
    MapValue<String, Object> detail = BallerinaValues.
            createRecordValue(PROTOCOL_HTTP_PKG_ID, HTTP_ERROR_DETAIL_RECORD);
    return cause == null ? BallerinaValues.createRecord(detail, message) :
            BallerinaValues.createRecord(detail, message, cause);
}
// Maps well-known transport error messages to their HttpErrorType; anything unrecognised
// becomes a generic client error.
private static HttpErrorType getErrorType(String errorMessage) {
    // Timeout messages embed dynamic details, so they are matched by substring.
    if (errorMessage.contains("Idle timeout triggered")) {
        return HttpErrorType.IDLE_TIMEOUT_TRIGGERED;
    }
    switch (errorMessage) {
        case REMOTE_SERVER_CLOSED_BEFORE_INITIATING_INBOUND_RESPONSE:
            return HttpErrorType.INIT_INBOUND_RESPONSE_FAILED;
        case REMOTE_SERVER_CLOSED_WHILE_READING_INBOUND_RESPONSE_HEADERS:
            return HttpErrorType.READING_INBOUND_RESPONSE_HEADERS_FAILED;
        case REMOTE_SERVER_CLOSED_WHILE_READING_INBOUND_RESPONSE_BODY:
            return HttpErrorType.READING_INBOUND_RESPONSE_BODY_FAILED;
        case REMOTE_SERVER_CLOSED_BEFORE_INITIATING_OUTBOUND_REQUEST:
            return HttpErrorType.INIT_OUTBOUND_REQUEST_FAILED;
        case REMOTE_SERVER_CLOSED_WHILE_WRITING_OUTBOUND_REQUEST_HEADERS:
            return HttpErrorType.WRITING_OUTBOUND_REQUEST_HEADER_FAILED;
        case REMOTE_SERVER_CLOSED_WHILE_WRITING_OUTBOUND_REQUEST_BODY:
            return HttpErrorType.WRITING_OUTBOUND_REQUEST_BODY_FAILED;
        case REMOTE_CLIENT_CLOSED_BEFORE_INITIATING_INBOUND_REQUEST:
            return HttpErrorType.INIT_INBOUND_REQUEST_FAILED;
        case REMOTE_CLIENT_CLOSED_WHILE_READING_INBOUND_REQUEST_HEADERS:
            return HttpErrorType.READING_INBOUND_REQUEST_HEADER_FAILED;
        case REMOTE_CLIENT_CLOSED_WHILE_READING_INBOUND_REQUEST_BODY:
            return HttpErrorType.READING_INBOUND_REQUEST_BODY_FAILED;
        case REMOTE_CLIENT_CLOSED_BEFORE_INITIATING_OUTBOUND_RESPONSE:
            return HttpErrorType.INIT_OUTBOUND_RESPONSE_FAILED;
        case REMOTE_CLIENT_CLOSED_WHILE_WRITING_OUTBOUND_RESPONSE_HEADERS:
            return HttpErrorType.WRITING_OUTBOUND_RESPONSE_HEADERS_FAILED;
        case REMOTE_CLIENT_CLOSED_WHILE_WRITING_OUTBOUND_RESPONSE_BODY:
            return HttpErrorType.WRITING_OUTBOUND_RESPONSE_BODY_FAILED;
        case REMOTE_CLIENT_CLOSED_BEFORE_INITIATING_100_CONTINUE_RESPONSE:
            return HttpErrorType.INIT_100_CONTINUE_RESPONSE_FAILED;
        case REMOTE_CLIENT_CLOSED_WHILE_WRITING_100_CONTINUE_RESPONSE:
            return HttpErrorType.WRITING_100_CONTINUE_RESPONSE_FAILED;
        case PROMISED_STREAM_REJECTED_ERROR:
            return HttpErrorType.HTTP2_CLIENT_ERROR;
        default:
            return HttpErrorType.GENERIC_CLIENT_ERROR;
    }
}

// Creates a nested error value (used as a cause) from the given reason and detail record type.
private static ErrorValue createErrorCause(String message, String reason, BPackage packageName, String recordName) {
    MapValue<String, Object> detailRecordType = BallerinaValues.createRecordValue(packageName, recordName);
    MapValue<String, Object> detailRecord = BallerinaValues.createRecord(detailRecordType, message, null);
    return BallerinaErrors.createError(reason, detailRecord);
}
// Returns the transport message cached on the object; when none is present yet, the given
// default is cached and returned instead.
public static HttpCarbonMessage getCarbonMsg(ObjectValue objectValue, HttpCarbonMessage defaultMsg) {
    HttpCarbonMessage httpCarbonMessage = (HttpCarbonMessage) objectValue.getNativeData(TRANSPORT_MESSAGE);
    if (httpCarbonMessage != null) {
        return httpCarbonMessage;
    }
    addCarbonMsg(objectValue, defaultMsg);
    return defaultMsg;
}

/**
 * Gets the {@code Http2PushPromise} represented by the PushPromise object.
 *
 * @param pushPromiseObj the push promise object
 * @param defaultPushPromise the Http2PushPromise to use if the object does not have native data of a push promise
 * @return the {@code Http2PushPromise} represented by the PushPromise object
 */
public static Http2PushPromise getPushPromise(ObjectValue pushPromiseObj, Http2PushPromise defaultPushPromise) {
    Http2PushPromise pushPromise =
            (Http2PushPromise) pushPromiseObj.getNativeData(HttpConstants.TRANSPORT_PUSH_PROMISE);
    if (pushPromise != null) {
        return pushPromise;
    }
    pushPromiseObj.addNativeData(HttpConstants.TRANSPORT_PUSH_PROMISE, defaultPushPromise);
    return defaultPushPromise;
}

/**
 * Populates the push promise object from native {@code Http2PushPromise}.
 * @param pushPromiseObj the push promise object
 * @param pushPromise the native Http2PushPromise
 */
public static void populatePushPromiseStruct(ObjectValue pushPromiseObj,
                                             Http2PushPromise pushPromise) {
    pushPromiseObj.addNativeData(HttpConstants.TRANSPORT_PUSH_PROMISE, pushPromise);
    pushPromiseObj.set(HttpConstants.PUSH_PROMISE_PATH_FIELD, pushPromise.getPath());
    pushPromiseObj.set(HttpConstants.PUSH_PROMISE_METHOD_FIELD, pushPromise.getMethod());
}

/**
 * Creates native {@code Http2PushPromise} from PushPromise object.
 *
 * @param pushPromiseObj the PushPromise object
 * @return the populated the native {@code Http2PushPromise}
 */
public static Http2PushPromise createHttpPushPromise(ObjectValue pushPromiseObj) {
    String method = pushPromiseObj.get(HttpConstants.PUSH_PROMISE_METHOD_FIELD).toString();
    if (method == null || method.isEmpty()) {
        // Default to GET when the object does not specify a method.
        method = HttpConstants.HTTP_METHOD_GET;
    }
    String path = pushPromiseObj.get(HttpConstants.PUSH_PROMISE_PATH_FIELD).toString();
    if (path == null || path.isEmpty()) {
        // Default to the base path when the object does not specify one.
        path = HttpConstants.DEFAULT_BASE_PATH;
    }
    return new Http2PushPromise(method, path);
}

// Caches the transport message on the given object as native data.
public static void addCarbonMsg(ObjectValue struct, HttpCarbonMessage httpCarbonMessage) {
    struct.addNativeData(TRANSPORT_MESSAGE, httpCarbonMessage);
}
/**
 * Populates the inbound request object (and its entity) from the transport message:
 * mutual-SSL status, request-line info, headers and an optional cache-control record.
 */
public static void populateInboundRequest(ObjectValue inboundRequest, ObjectValue entity,
                                          HttpCarbonMessage inboundRequestMsg) {
    inboundRequest.addNativeData(TRANSPORT_MESSAGE, inboundRequestMsg);
    inboundRequest.addNativeData(REQUEST, true);
    MapValue mutualSslRecord = ValueCreatorUtils.createHTTPRecordValue(MUTUAL_SSL_HANDSHAKE_RECORD);
    mutualSslRecord.put(REQUEST_MUTUAL_SSL_HANDSHAKE_STATUS,
            inboundRequestMsg.getProperty(HttpConstants.MUTUAL_SSL_RESULT));
    mutualSslRecord.put(MUTUAL_SSL_CERTIFICATE, inboundRequestMsg.getProperty(HttpConstants.BASE_64_ENCODED_CERT));
    inboundRequest.set(REQUEST_MUTUAL_SSL_HANDSHAKE_FIELD, mutualSslRecord);
    enrichWithInboundRequestInfo(inboundRequest, inboundRequestMsg);
    enrichWithInboundRequestHeaders(inboundRequest, inboundRequestMsg);
    populateEntity(entity, inboundRequestMsg);
    inboundRequest.set(REQUEST_ENTITY_FIELD, entity);
    inboundRequest.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
    String cacheControlHeader = inboundRequestMsg.getHeader(CACHE_CONTROL.toString());
    if (cacheControlHeader != null) {
        ObjectValue cacheControlObj = ValueCreatorUtils.createRequestCacheControlObject();
        RequestCacheControlObj requestCacheControl = new RequestCacheControlObj(cacheControlObj);
        requestCacheControl.populateStruct(cacheControlHeader);
        inboundRequest.set(REQUEST_CACHE_CONTROL_FIELD, requestCacheControl.getObj());
    }
}

// Lifts the User-Agent header into a dedicated field and removes it from the header map.
private static void enrichWithInboundRequestHeaders(ObjectValue inboundRequestObj,
                                                    HttpCarbonMessage inboundRequestMsg) {
    if (inboundRequestMsg.getHeader(HttpHeaderNames.USER_AGENT.toString()) != null) {
        String agent = inboundRequestMsg.getHeader(HttpHeaderNames.USER_AGENT.toString());
        inboundRequestObj.set(HttpConstants.REQUEST_USER_AGENT_FIELD, agent);
        inboundRequestMsg.removeHeader(HttpHeaderNames.USER_AGENT.toString());
    }
}

// Copies request-line information: raw path, method, HTTP version and extra path info.
private static void enrichWithInboundRequestInfo(ObjectValue inboundRequestObj,
                                                 HttpCarbonMessage inboundRequestMsg) {
    inboundRequestObj.set(HttpConstants.REQUEST_RAW_PATH_FIELD, inboundRequestMsg.getRequestUrl());
    inboundRequestObj.set(HttpConstants.REQUEST_METHOD_FIELD, inboundRequestMsg.getHttpMethod());
    inboundRequestObj.set(HttpConstants.REQUEST_VERSION_FIELD, inboundRequestMsg.getHttpVersion());
    HttpResourceArguments resourceArgValues = (HttpResourceArguments) inboundRequestMsg.getProperty(
            HttpConstants.RESOURCE_ARGS);
    if (resourceArgValues != null && resourceArgValues.getMap().get(HttpConstants.EXTRA_PATH_INFO) != null) {
        inboundRequestObj.set(HttpConstants.REQUEST_EXTRA_PATH_INFO_FIELD,
                resourceArgValues.getMap().get(HttpConstants.EXTRA_PATH_INFO));
    }
}
/**
 * Populates the HTTP caller with native data.
 *
 * @param caller Represents the HTTP caller
 * @param inboundMsg Represents carbon message
 * @param config Represents service endpoint configuration
 */
public static void enrichHttpCallerWithNativeData(ObjectValue caller, HttpCarbonMessage inboundMsg,
                                                  MapValue config) {
    caller.addNativeData(HttpConstants.TRANSPORT_MESSAGE, inboundMsg);
    caller.set(HttpConstants.HTTP_CONNECTOR_CONFIG_FIELD, config);
}

/**
 * Populates the HTTP caller with connection information.
 * @param httpCaller Represents the HTTP caller
 * @param inboundMsg Represents the carbon message
 * @param httpResource Represents the Http Resource
 * @param config Represents the service endpoint configuration
 */
public static void enrichHttpCallerWithConnectionInfo(ObjectValue httpCaller, HttpCarbonMessage inboundMsg,
                                                      HttpResource httpResource, MapValue config) {
    MapValue<String, Object> remote = ValueCreatorUtils.createHTTPRecordValue(HttpConstants.REMOTE);
    MapValue<String, Object> local = ValueCreatorUtils.createHTTPRecordValue(HttpConstants.LOCAL);
    Object remoteSocketAddress = inboundMsg.getProperty(HttpConstants.REMOTE_ADDRESS);
    if (remoteSocketAddress instanceof InetSocketAddress) {
        InetSocketAddress inetSocketAddress = (InetSocketAddress) remoteSocketAddress;
        // getHostString avoids a reverse-DNS lookup for the remote peer.
        String remoteHost = inetSocketAddress.getHostString();
        long remotePort = inetSocketAddress.getPort();
        remote.put(HttpConstants.REMOTE_HOST_FIELD, remoteHost);
        remote.put(HttpConstants.REMOTE_PORT_FIELD, remotePort);
    }
    httpCaller.set(HttpConstants.REMOTE_STRUCT_FIELD, remote);
    Object localSocketAddress = inboundMsg.getProperty(HttpConstants.LOCAL_ADDRESS);
    if (localSocketAddress instanceof InetSocketAddress) {
        InetSocketAddress inetSocketAddress = (InetSocketAddress) localSocketAddress;
        String localHost = inetSocketAddress.getHostName();
        long localPort = inetSocketAddress.getPort();
        local.put(HttpConstants.LOCAL_HOST_FIELD, localHost);
        local.put(HttpConstants.LOCAL_PORT_FIELD, localPort);
    }
    httpCaller.set(HttpConstants.LOCAL_STRUCT_INDEX, local);
    httpCaller.set(HttpConstants.SERVICE_ENDPOINT_PROTOCOL_FIELD, inboundMsg.getProperty(HttpConstants.PROTOCOL));
    httpCaller.set(HttpConstants.SERVICE_ENDPOINT_CONFIG_FIELD, config);
    httpCaller.addNativeData(HttpConstants.HTTP_SERVICE, httpResource.getParentService());
    httpCaller.addNativeData(HttpConstants.REMOTE_SOCKET_ADDRESS, remoteSocketAddress);
}
/**
 * Populate inbound response with headers and entity.
 * @param inboundResponse Ballerina struct to represent response
 * @param entity Entity of the response
 * @param inboundResponseMsg Represent carbon message.
 */
public static void populateInboundResponse(ObjectValue inboundResponse, ObjectValue entity,
                                           HttpCarbonMessage inboundResponseMsg) {
    inboundResponse.addNativeData(TRANSPORT_MESSAGE, inboundResponseMsg);
    int statusCode = inboundResponseMsg.getHttpStatusCode();
    inboundResponse.set(RESPONSE_STATUS_CODE_FIELD, (long) statusCode);
    String reasonPhrase = inboundResponseMsg.getReasonPhrase();
    inboundResponse.set(RESPONSE_REASON_PHRASE_FIELD, reasonPhrase);
    // The Server header is lifted into a dedicated field and removed from the header map.
    if (inboundResponseMsg.getHeader(HttpHeaderNames.SERVER.toString()) != null) {
        inboundResponse.set(HttpConstants.RESPONSE_SERVER_FIELD,
                inboundResponseMsg.getHeader(HttpHeaderNames.SERVER.toString()));
        inboundResponseMsg.removeHeader(HttpHeaderNames.SERVER.toString());
    }
    if (inboundResponseMsg.getProperty(RESOLVED_REQUESTED_URI) != null) {
        inboundResponse.set(RESOLVED_REQUESTED_URI_FIELD,
                inboundResponseMsg.getProperty(RESOLVED_REQUESTED_URI).toString());
    }
    String cacheControlHeader = inboundResponseMsg.getHeader(CACHE_CONTROL.toString());
    if (cacheControlHeader != null) {
        ResponseCacheControlObj responseCacheControl = new ResponseCacheControlObj(PROTOCOL_HTTP_PKG_ID,
                RESPONSE_CACHE_CONTROL);
        responseCacheControl.populateStruct(cacheControlHeader);
        inboundResponse.set(RESPONSE_CACHE_CONTROL_FIELD, responseCacheControl.getObj());
    }
    populateEntity(entity, inboundResponseMsg);
    inboundResponse.set(RESPONSE_ENTITY_FIELD, entity);
    inboundResponse.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
}

/**
 * Populate entity with headers, content-type and content-length.
 *
 * @param entity Represent an entity struct
 * @param cMsg Represent a carbon message
 */
private static void populateEntity(ObjectValue entity, HttpCarbonMessage cMsg) {
    long contentLength = -1;
    String lengthStr = cMsg.getHeader(HttpHeaderNames.CONTENT_LENGTH.toString());
    try {
        // A missing header keeps the -1 sentinel; a malformed one becomes a typed HTTP error.
        contentLength = lengthStr != null ? Long.parseLong(lengthStr) : contentLength;
        MimeUtil.setContentLength(entity, contentLength);
    } catch (NumberFormatException e) {
        throw createHttpError("Invalid content length", HttpErrorType.INVALID_CONTENT_LENGTH);
    }
    entity.addNativeData(ENTITY_HEADERS, cMsg.getHeaders());
    entity.addNativeData(ENTITY_TRAILER_HEADERS, cMsg.getTrailerHeaders());
}
/**
 * Set headers and properties of request/response object to the outbound transport message.
 *
 * @param outboundMsg transport Http carbon message.
 * @param outboundMsgObj req/resp object.
 */
public static void enrichOutboundMessage(HttpCarbonMessage outboundMsg, ObjectValue outboundMsgObj) {
    setHeadersToTransportMessage(outboundMsg, outboundMsgObj);
    setPropertiesToTransportMessage(outboundMsg, outboundMsgObj);
}

// Merges entity headers (and, for responses, trailers) into the transport message headers and
// re-points the entity at the transport header map afterwards.
private static void setHeadersToTransportMessage(HttpCarbonMessage outboundMsg, ObjectValue messageObj) {
    boolean request = isRequest(messageObj);
    ObjectValue entityObj = (ObjectValue) messageObj
            .get(request ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
    HttpHeaders transportHeaders = outboundMsg.getHeaders();
    if (request || isResponse(messageObj)) {
        addRemovedPropertiesBackToHeadersMap(messageObj, transportHeaders);
    }
    HttpHeaders httpHeaders = (HttpHeaders) entityObj.getNativeData(ENTITY_HEADERS);
    // Skip the merge when the entity already shares the transport header map.
    if (httpHeaders != transportHeaders) {
        if (httpHeaders != null) {
            transportHeaders.add(httpHeaders);
        }
        entityObj.addNativeData(ENTITY_HEADERS, outboundMsg.getHeaders());
    }
    if (!request) {
        HttpHeaders transportTrailingHeaders = outboundMsg.getTrailerHeaders();
        HttpHeaders trailingHeaders = (HttpHeaders) entityObj.getNativeData(ENTITY_TRAILER_HEADERS);
        if (trailingHeaders != null && trailingHeaders != transportTrailingHeaders) {
            transportTrailingHeaders.add(trailingHeaders);
        }
    }
}

// True when the object's Ballerina type is Request.
private static boolean isRequest(ObjectValue value) {
    return value.getType().getName().equals(REQUEST);
}

// True when the object's Ballerina type is Response.
private static boolean isResponse(ObjectValue value) {
    return value.getType().getName().equals(HttpConstants.RESPONSE);
}

// Restores the User-Agent / Server header values that were earlier lifted into dedicated fields.
private static void addRemovedPropertiesBackToHeadersMap(ObjectValue messageObj, HttpHeaders transportHeaders) {
    if (isRequest(messageObj)) {
        Object userAgent = messageObj.get(HttpConstants.REQUEST_USER_AGENT_FIELD);
        if (userAgent != null && !userAgent.toString().isEmpty()) {
            transportHeaders.set(HttpHeaderNames.USER_AGENT.toString(), userAgent.toString());
        }
    } else {
        Object server = messageObj.get(HttpConstants.RESPONSE_SERVER_FIELD);
        if (server != null && !server.toString().isEmpty()) {
            transportHeaders.set(HttpHeaderNames.SERVER.toString(), server.toString());
        }
    }
}

// Copies status code and reason phrase from a response object onto the transport message.
private static void setPropertiesToTransportMessage(HttpCarbonMessage outboundResponseMsg, ObjectValue messageObj) {
    if (isResponse(messageObj)) {
        long statusCode = (Long) messageObj.get(RESPONSE_STATUS_CODE_FIELD);
        if (statusCode != 0) {
            outboundResponseMsg.setHttpStatusCode(getIntValue(statusCode));
        }
        Object respPhrase = messageObj.get(RESPONSE_REASON_PHRASE_FIELD);
        if (respPhrase != null && !respPhrase.toString().isEmpty()) {
            outboundResponseMsg.setProperty(HttpConstants.HTTP_REASON_PHRASE, respPhrase.toString());
        }
    }
}
/**
* Check the existence of entity. Set new entity of not present.
*
* @param value request/response struct.
*/
public static void checkEntityAvailability(ObjectValue value) {
ObjectValue entity = (ObjectValue) value.get(isRequest(value) ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
if (entity == null) {
createNewEntity(value);
}
}
/**
* Check the existence of content-length and transfer-encoding headers.
*
* @param message transport message
* @return true if the headers are available else false.
*/
public static Boolean checkRequestBodySizeHeadersAvailability(HttpCarbonMessage message) {
String contentLength = message.getHeader(HttpHeaderNames.CONTENT_LENGTH.toString());
String transferEncoding = message.getHeader(HttpHeaderNames.TRANSFER_ENCODING.toString());
return contentLength != null || transferEncoding != null;
}
/**
* Check the existence of the message entity data source.
*
* @param value request/response object.
* @return true if the message entity data source is available else false.
*/
public static boolean isEntityDataSourceAvailable(ObjectValue value) {
ObjectValue entityObj = (ObjectValue) value
.get(isRequest(value) ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
return (entityObj != null && EntityBodyHandler.getMessageDataSource(entityObj) != null);
}
    /**
     * Decides the outbound content-encoding header based on the service's compression
     * annotation, the response content type, and the client's accept-encoding header.
     * A content-encoding already set by the user is never overridden.
     *
     * @param compressionConfig   compression annotation config; method is a no-op when absent
     * @param requestMsg          inbound request carrying accept-encoding
     * @param outboundResponseMsg outbound response whose headers are decorated
     */
    private static void setCompressionHeaders(MapValue<String, Object> compressionConfig, HttpCarbonMessage requestMsg,
                                              HttpCarbonMessage outboundResponseMsg) {
        if (!checkConfigAnnotationAvailability(compressionConfig)) {
            return;
        }
        String contentEncoding = outboundResponseMsg.getHeaders().get(HttpHeaderNames.CONTENT_ENCODING);
        if (contentEncoding != null) {
            // The user already chose an encoding explicitly; leave it untouched.
            return;
        }
        CompressionConfigState compressionState = getCompressionState(
                compressionConfig.getStringValue(ANN_CONFIG_ATTR_COMPRESSION_ENABLE));
        if (compressionState == CompressionConfigState.NEVER) {
            outboundResponseMsg.getHeaders().set(HttpHeaderNames.CONTENT_ENCODING, HTTP_TRANSFER_ENCODING_IDENTITY);
            return;
        }
        String acceptEncodingValue = requestMsg.getHeaders().get(HttpHeaderNames.ACCEPT_ENCODING);
        List<String> contentTypesAnnotationValues = getAsStringList(
                compressionConfig.getArrayValue(ANN_CONFIG_ATTR_COMPRESSION_CONTENT_TYPES).getStringArray());
        String contentType = outboundResponseMsg.getHeader(HttpHeaderNames.CONTENT_TYPE.toString());
        // An empty configured content-type list means every content type is eligible.
        if (contentTypesAnnotationValues.isEmpty() || isContentTypeMatched(contentTypesAnnotationValues, contentType)) {
            if (compressionState == CompressionConfigState.ALWAYS &&
                    (acceptEncodingValue == null || HTTP_TRANSFER_ENCODING_IDENTITY.equals(acceptEncodingValue))) {
                // ALWAYS forces gzip even when the client did not request a coding.
                outboundResponseMsg.getHeaders().set(HttpHeaderNames.CONTENT_ENCODING, ENCODING_GZIP);
            }
        } else {
            // Content type not in the allow-list: explicitly mark as uncompressed.
            outboundResponseMsg.getHeaders().set(HttpHeaderNames.CONTENT_ENCODING, HTTP_TRANSFER_ENCODING_IDENTITY);
        }
    }
public static CompressionConfigState getCompressionState(String compressionState) {
switch (compressionState) {
case AUTO:
return CompressionConfigState.AUTO;
case ALWAYS:
return CompressionConfigState.ALWAYS;
case NEVER:
return CompressionConfigState.NEVER;
default:
return null;
}
}
private static boolean isContentTypeMatched(List<String> contentTypes, String contentType) {
return contentType != null && contentTypes.stream().anyMatch(contentType.toLowerCase()::contains);
}
private static List<String> getAsStringList(Object[] values) {
List<String> valuesList = new ArrayList<>();
if (values == null) {
return valuesList;
}
for (Object val : values) {
valuesList.add(val.toString().trim().toLowerCase());
}
return valuesList;
}
public static String getListenerInterface(String host, int port) {
host = host != null ? host : "0.0.0.0";
return host + ":" + port;
}
public static ChunkConfig getChunkConfig(String chunkConfig) {
switch (chunkConfig) {
case HttpConstants.AUTO:
return ChunkConfig.AUTO;
case HttpConstants.ALWAYS:
return ChunkConfig.ALWAYS;
case NEVER:
return ChunkConfig.NEVER;
default:
throw new BallerinaConnectorException(
"Invalid configuration found for Transfer-Encoding: " + chunkConfig);
}
}
public static KeepAliveConfig getKeepAliveConfig(String keepAliveConfig) {
switch (keepAliveConfig) {
case HttpConstants.AUTO:
return KeepAliveConfig.AUTO;
case HttpConstants.ALWAYS:
return KeepAliveConfig.ALWAYS;
case NEVER:
return KeepAliveConfig.NEVER;
default:
throw new BallerinaConnectorException(
"Invalid configuration found for Keep-Alive: " + keepAliveConfig);
}
}
public static ForwardedExtensionConfig getForwardedExtensionConfig(String forwarded) {
ForwardedExtensionConfig forwardedConfig;
if (HttpConstants.FORWARDED_ENABLE.equalsIgnoreCase(forwarded)) {
forwardedConfig = ForwardedExtensionConfig.ENABLE;
} else if (HttpConstants.FORWARDED_TRANSITION.equalsIgnoreCase(forwarded)) {
forwardedConfig = ForwardedExtensionConfig.TRANSITION;
} else if (HttpConstants.FORWARDED_DISABLE.equalsIgnoreCase(forwarded)) {
forwardedConfig = ForwardedExtensionConfig.DISABLE;
} else {
throw new BallerinaConnectorException("Invalid configuration found for Forwarded : " + forwarded);
}
return forwardedConfig;
}
public static HttpCarbonMessage createHttpCarbonMessage(boolean isRequest) {
HttpCarbonMessage httpCarbonMessage;
if (isRequest) {
httpCarbonMessage = new HttpCarbonMessage(
new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, ""));
} else {
httpCarbonMessage = new HttpCarbonMessage(
new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK));
}
httpCarbonMessage.completeMessage();
return httpCarbonMessage;
}
    /**
     * Validates that responding over this connection is legal: the inbound request must still
     * be available and the respond method must not have been invoked already.
     *
     * @param connectionObj       caller connection object
     * @param reqMsg              inbound request message
     * @param outboundResponseMsg outbound response; its status code feeds the 100-continue check
     */
    public static void checkFunctionValidity(ObjectValue connectionObj, HttpCarbonMessage reqMsg,
                                             HttpCarbonMessage outboundResponseMsg) {
        serverConnectionStructCheck(reqMsg);
        int statusCode = outboundResponseMsg.getHttpStatusCode();
        methodInvocationCheck(connectionObj, reqMsg, statusCode);
    }
private static void methodInvocationCheck(ObjectValue connectionObj, HttpCarbonMessage reqMsg, int statusCode) {
if (connectionObj.getNativeData(METHOD_ACCESSED) != null || reqMsg == null) {
throw new IllegalStateException("illegal function invocation");
}
if (!is100ContinueRequest(reqMsg, statusCode)) {
connectionObj.addNativeData(METHOD_ACCESSED, true);
}
}
    /**
     * Ensures the inbound request message is present; without it the Connection variable is
     * not usable for this operation.
     *
     * @param reqMsg inbound request message; must not be null
     */
    public static void serverConnectionStructCheck(HttpCarbonMessage reqMsg) {
        if (reqMsg == null) {
            throw createHttpError("operation not allowed:invalid Connection variable",
                    HttpErrorType.GENERIC_LISTENER_ERROR);
        }
    }
private static boolean is100ContinueRequest(HttpCarbonMessage reqMsg, int statusCode) {
return HttpConstants.HEADER_VAL_100_CONTINUE.equalsIgnoreCase(
reqMsg.getHeader(HttpHeaderNames.EXPECT.toString())) || statusCode == 100;
}
public static MapValue getTransactionConfigAnnotation(AttachedFunction resource, String transactionPackagePath) {
return (MapValue) resource.getAnnotation(transactionPackagePath,
TransactionConstants.ANN_NAME_TRX_PARTICIPANT_CONFIG);
}
private static int getIntValue(long val) {
int intVal = (int) val;
if (intVal != val) {
throw new IllegalArgumentException("invalid argument: " + val);
}
return intVal;
}
    /**
     * Reads the Content-Type header value from the transport message.
     *
     * @param transportMessage transport message to inspect
     * @return the Content-Type header value, or {@code null} when not present
     */
    public static String getContentTypeFromTransportMessage(HttpCarbonMessage transportMessage) {
        return transportMessage.getHeader(HttpHeaderNames.CONTENT_TYPE.toString());
    }
    /**
     * If the given Content-Type header value doesn't have a boundary parameter value, get a new boundary string and
     * append it to Content-Type and set it to transport message.
     *
     * @param transportMessage Represent transport message
     * @param contentType      Represent the Content-Type header value
     * @return The boundary string that was extracted from header or the newly generated one
     */
    public static String addBoundaryIfNotExist(HttpCarbonMessage transportMessage, String contentType) {
        String boundaryValue = HeaderUtil.extractBoundaryParameter(contentType);
        if (boundaryValue != null) {
            // Boundary already present: strip surrounding quotes, then rebuild the header only
            // when the existing Content-Type value fails validation.
            boundaryValue = sanitizeBoundary(boundaryValue);
            boolean validateContentType = MimeUtil.isValidateContentType(contentType);
            if (!validateContentType) {
                // Reconstruct "<type>;<params>" with the sanitized boundary re-quoted.
                String headerValue = HeaderUtil.getHeaderValue(contentType);
                MapValue<String, String> paramMap = HeaderUtil.getParamMap(contentType);
                paramMap.put(BOUNDARY, MimeUtil.includeQuotes(boundaryValue));
                contentType = HeaderUtil.appendHeaderParams(new StringBuilder(headerValue).append(";"), paramMap);
                transportMessage.setHeader(String.valueOf(CONTENT_TYPE), contentType);
            }
            return boundaryValue;
        }
        // No boundary parameter yet: generate one and append it to the header.
        return HttpUtil.addBoundaryParameter(transportMessage, contentType);
    }
/**
* Generate a new boundary string and append it Content-Type and set that to transport message.
*
* @param transportMessage Represent transport message
* @param contentType Represent the Content-Type header value
* @return The newly generated boundary string
*/
private static String addBoundaryParameter(HttpCarbonMessage transportMessage, String contentType) {
String boundaryString = null;
if (contentType != null && contentType.startsWith(MULTIPART_AS_PRIMARY_TYPE)) {
boundaryString = MimeUtil.getNewMultipartDelimiter();
transportMessage.setHeader(HttpHeaderNames.CONTENT_TYPE.toString(), contentType + "; " + BOUNDARY + "=" +
boundaryString);
}
return boundaryString;
}
/**
* Sanitize the boundary string by removing leading and trailing double quotes.
*
* @param boundaryString Represent boundary string
* @return Sanitized boundary string
*/
private static String sanitizeBoundary(String boundaryString) {
return boundaryString.replaceAll("^\"|\"$", "");
}
    /**
     * Creates a new default HTTP/WebSocket connector factory.
     *
     * @return a fresh {@code DefaultHttpWsConnectorFactory} instance
     */
    public static HttpWsConnectorFactory createHttpWsConnectionFactory() {
        return new DefaultHttpWsConnectorFactory();
    }
    /**
     * When observation is enabled for the current frame, injects the observability context into
     * the outbound request headers and tags the context with HTTP method, URL (unless disabled
     * by config) and peer address.
     *
     * @param strand  current strand carrying the observer context
     * @param message outbound request message
     */
    public static void checkAndObserveHttpRequest(Strand strand, HttpCarbonMessage message) {
        Optional<ObserverContext> observerContext = ObserveUtils.getObserverContextOfCurrentFrame(strand);
        observerContext.ifPresent(ctx -> {
            // NOTE(review): the lambda parameter 'ctx' is unused; the body reads
            // strand.observerContext directly — confirm these always refer to the same context.
            HttpUtil.injectHeaders(message, ObserveUtils.getContextProperties(strand.observerContext));
            strand.observerContext.addTag(TAG_KEY_HTTP_METHOD, message.getHttpMethod());
            if (!ConfigRegistry.getInstance().getAsBoolean(CONFIG_CLIENT_HTTP_URL_DISABLED)) {
                strand.observerContext.addTag(TAG_KEY_HTTP_URL, String.valueOf(message.getProperty(HttpConstants.TO)));
            }
            strand.observerContext.addTag(TAG_KEY_PEER_ADDRESS,
                    message.getProperty(PROPERTY_HTTP_HOST) + ":" + message.getProperty(PROPERTY_HTTP_PORT));
            // The real status code is not known yet; it is seeded with 0 here.
            strand.observerContext.addTag(TAG_KEY_HTTP_STATUS_CODE, Integer.toString(0));
        });
    }
public static void injectHeaders(HttpCarbonMessage msg, Map<String, String> headers) {
if (headers != null) {
headers.forEach((key, value) -> msg.setHeader(key, String.valueOf(value)));
}
}
private static void setChunkingHeader(String transferValue, HttpCarbonMessage outboundResponseMsg) {
if (transferValue == null) {
return;
}
outboundResponseMsg.setProperty(CHUNKING_CONFIG, getChunkConfig(transferValue));
}
    /**
     * Creates InResponse using the native {@code HttpCarbonMessage}.
     *
     * @param httpCarbonMessage the HttpCarbonMessage
     * @return the Response struct
     */
    public static ObjectValue createResponseStruct(HttpCarbonMessage httpCarbonMessage) {
        ObjectValue responseObj = ValueCreatorUtils.createResponseObject();
        ObjectValue entity = ValueCreatorUtils.createEntityObject();
        // Bind the fresh entity and the transport message to the new response object.
        HttpUtil.populateInboundResponse(responseObj, entity, httpCarbonMessage);
        return responseObj;
    }
    /**
     * Populates the transport sender configuration from the client endpoint config: SSL setup,
     * HTTP/1.1 proxy settings, socket idle timeout, HTTP version and forwarded-extension config.
     *
     * @param senderConfiguration  transport sender configuration to populate
     * @param clientEndpointConfig Ballerina client endpoint config record
     * @param scheme               target URL scheme (http/https)
     */
    public static void populateSenderConfigurations(SenderConfiguration senderConfiguration,
                                                    MapValue<String, Object> clientEndpointConfig, String scheme) {
        ProxyServerConfiguration proxyServerConfiguration;
        MapValue secureSocket = clientEndpointConfig.getMapValue(HttpConstants.ENDPOINT_CONFIG_SECURE_SOCKET);
        String httpVersion = clientEndpointConfig.getStringValue(HttpConstants.CLIENT_EP_HTTP_VERSION);
        if (secureSocket != null) {
            HttpUtil.populateSSLConfiguration(senderConfiguration, secureSocket);
        } else if (scheme.equals(PROTOCOL_HTTPS)) {
            // https without an explicit secureSocket: HTTP/2 requires one, HTTP/1.1 may fall
            // back to the JVM default trust material.
            if (httpVersion.equals(HTTP_2_0_VERSION)) {
                throw createHttpError("To enable https you need to configure secureSocket record",
                        HttpErrorType.SSL_ERROR);
            } else {
                senderConfiguration.useJavaDefaults();
            }
        }
        if (HTTP_1_1_VERSION.equals(httpVersion)) {
            // Proxy settings only exist under http1Settings.
            MapValue<String, Object> http1Settings = (MapValue<String, Object>) clientEndpointConfig
                    .get(HttpConstants.HTTP1_SETTINGS);
            MapValue proxy = http1Settings.getMapValue(HttpConstants.PROXY_STRUCT_REFERENCE);
            if (proxy != null) {
                String proxyHost = proxy.getStringValue(HttpConstants.PROXY_HOST);
                int proxyPort = proxy.getIntValue(HttpConstants.PROXY_PORT).intValue();
                String proxyUserName = proxy.getStringValue(HttpConstants.PROXY_USERNAME);
                String proxyPassword = proxy.getStringValue(HttpConstants.PROXY_PASSWORD);
                try {
                    proxyServerConfiguration = new ProxyServerConfiguration(proxyHost, proxyPort);
                } catch (UnknownHostException e) {
                    throw new BallerinaConnectorException("Failed to resolve host" + proxyHost, e);
                }
                if (!proxyUserName.isEmpty()) {
                    proxyServerConfiguration.setProxyUsername(proxyUserName);
                }
                if (!proxyPassword.isEmpty()) {
                    proxyServerConfiguration.setProxyPassword(proxyPassword);
                }
                senderConfiguration.setProxyServerConfiguration(proxyServerConfiguration);
            }
        }
        long timeoutMillis = clientEndpointConfig.getIntValue(HttpConstants.CLIENT_EP_ENDPOINT_TIMEOUT);
        if (timeoutMillis < 0) {
            // Negative timeout means "disabled"; 0 is the transport's no-timeout value.
            senderConfiguration.setSocketIdleTimeout(0);
        } else {
            senderConfiguration.setSocketIdleTimeout(
                    validateConfig(timeoutMillis, HttpConstants.CLIENT_EP_ENDPOINT_TIMEOUT));
        }
        if (httpVersion != null) {
            senderConfiguration.setHttpVersion(httpVersion);
        }
        String forwardedExtension = clientEndpointConfig.getStringValue(HttpConstants.CLIENT_EP_FORWARDED);
        senderConfiguration.setForwardedExtensionConfig(HttpUtil.getForwardedExtensionConfig(forwardedExtension));
    }
    /**
     * Returns the {@code ConnectionManager} cached on the pool record, lazily creating it on
     * first use. Uses a check, a lock on the pool record, and a re-check so concurrent clients
     * sharing the record end up with a single manager instance.
     *
     * @param poolStruct Ballerina connection pool record carrying the manager as native data
     * @return the connection manager bound to the pool record
     */
    public static ConnectionManager getConnectionManager(MapValue<String, Long> poolStruct) {
        ConnectionManager poolManager = (ConnectionManager) poolStruct.getNativeData(CONNECTION_MANAGER);
        if (poolManager == null) {
            synchronized (poolStruct) {
                // Re-check under the lock: another thread may have created it meanwhile.
                if (poolStruct.getNativeData(CONNECTION_MANAGER) == null) {
                    PoolConfiguration userDefinedPool = new PoolConfiguration();
                    populatePoolingConfig(poolStruct, userDefinedPool);
                    poolManager = new ConnectionManager(userDefinedPool);
                    poolStruct.addNativeData(CONNECTION_MANAGER, poolManager);
                }
            }
        }
        return poolManager;
    }
public static void populatePoolingConfig(MapValue<String, Long> poolRecord, PoolConfiguration poolConfiguration) {
long maxActiveConnections = poolRecord.get(HttpConstants.CONNECTION_POOLING_MAX_ACTIVE_CONNECTIONS);
poolConfiguration.setMaxActivePerPool(
validateConfig(maxActiveConnections, HttpConstants.CONNECTION_POOLING_MAX_ACTIVE_CONNECTIONS));
long maxIdleConnections = poolRecord.get(HttpConstants.CONNECTION_POOLING_MAX_IDLE_CONNECTIONS);
poolConfiguration.setMaxIdlePerPool(
validateConfig(maxIdleConnections, HttpConstants.CONNECTION_POOLING_MAX_IDLE_CONNECTIONS));
long waitTime = poolRecord.get(HttpConstants.CONNECTION_POOLING_WAIT_TIME);
poolConfiguration.setMaxWaitTime(waitTime);
long maxActiveStreamsPerConnection = poolRecord.get(CONNECTION_POOLING_MAX_ACTIVE_STREAMS_PER_CONNECTION);
poolConfiguration.setHttp2MaxActiveStreamsPerConnection(
maxActiveStreamsPerConnection == -1 ? Integer.MAX_VALUE : validateConfig(maxActiveStreamsPerConnection,
CONNECTION_POOLING_MAX_ACTIVE_STREAMS_PER_CONNECTION));
}
private static int validateConfig(long value, String configName) {
try {
return Math.toIntExact(value);
} catch (ArithmeticException e) {
log.warn("The value set for the configuration needs to be less than {}. The " + configName +
"value is set to {}", Integer.MAX_VALUE);
return Integer.MAX_VALUE;
}
}
    /**
     * Populates SSL configuration instance with secure socket configuration.
     *
     * @param sslConfiguration ssl configuration instance.
     * @param secureSocket     secure socket configuration.
     */
    public static void populateSSLConfiguration(SslConfiguration sslConfiguration, MapValue secureSocket) {
        MapValue trustStore = secureSocket.getMapValue(ENDPOINT_CONFIG_TRUST_STORE);
        MapValue keyStore = secureSocket.getMapValue(ENDPOINT_CONFIG_KEY_STORE);
        MapValue protocols = secureSocket.getMapValue(ENDPOINT_CONFIG_PROTOCOLS);
        MapValue validateCert = secureSocket.getMapValue(ENDPOINT_CONFIG_VALIDATE_CERT);
        String keyFile = secureSocket.getStringValue(ENDPOINT_CONFIG_KEY);
        String certFile = secureSocket.getStringValue(ENDPOINT_CONFIG_CERTIFICATE);
        String trustCerts = secureSocket.getStringValue(ENDPOINT_CONFIG_TRUST_CERTIFICATES);
        String keyPassword = secureSocket.getStringValue(ENDPOINT_CONFIG_KEY_PASSWORD);
        boolean disableSslValidation = secureSocket.getBooleanValue(ENDPOINT_CONFIG_DISABLE_SSL);
        List<Parameter> clientParams = new ArrayList<>();
        // Short-circuits: fully disabled validation, or JVM-default trust when neither
        // trust certificates nor a truststore are given.
        if (disableSslValidation) {
            sslConfiguration.disableSsl();
            return;
        } else if (StringUtils.isEmpty(trustCerts) && trustStore == null) {
            sslConfiguration.useJavaDefaults();
            return;
        }
        // Trust material: either a truststore or trust certificates, never both.
        if (trustStore != null && StringUtils.isNotBlank(trustCerts)) {
            throw createHttpError("Cannot configure both trustStore and trustCerts at the same time.",
                    HttpErrorType.SSL_ERROR);
        }
        if (trustStore != null) {
            String trustStoreFile = trustStore.getStringValue(FILE_PATH);
            if (StringUtils.isNotBlank(trustStoreFile)) {
                sslConfiguration.setTrustStoreFile(trustStoreFile);
            }
            String trustStorePassword = trustStore.getStringValue(PASSWORD);
            if (StringUtils.isNotBlank(trustStorePassword)) {
                sslConfiguration.setTrustStorePass(trustStorePassword);
            }
        } else if (StringUtils.isNotBlank(trustCerts)) {
            sslConfiguration.setClientTrustCertificates(trustCerts);
        }
        // Key material: either a keystore or a key file + certificate file pair.
        if (keyStore != null && StringUtils.isNotBlank(keyFile)) {
            throw createHttpError("Cannot configure both keyStore and keyFile.", HttpErrorType.SSL_ERROR);
        } else if (StringUtils.isNotBlank(keyFile) && StringUtils.isBlank(certFile)) {
            throw createHttpError("Need to configure certFile containing client ssl certificates.",
                    HttpErrorType.SSL_ERROR);
        }
        if (keyStore != null) {
            String keyStoreFile = keyStore.getStringValue(FILE_PATH);
            if (StringUtils.isNotBlank(keyStoreFile)) {
                sslConfiguration.setKeyStoreFile(keyStoreFile);
            }
            String keyStorePassword = keyStore.getStringValue(PASSWORD);
            if (StringUtils.isNotBlank(keyStorePassword)) {
                sslConfiguration.setKeyStorePass(keyStorePassword);
            }
        } else if (StringUtils.isNotBlank(keyFile)) {
            sslConfiguration.setClientKeyFile(keyFile);
            sslConfiguration.setClientCertificates(certFile);
            if (StringUtils.isNotBlank(keyPassword)) {
                sslConfiguration.setClientKeyPassword(keyPassword);
            }
        }
        if (protocols != null) {
            List<String> sslEnabledProtocolsValueList = Arrays.asList(
                    protocols.getArrayValue(ENABLED_PROTOCOLS).getStringArray());
            if (!sslEnabledProtocolsValueList.isEmpty()) {
                // Enabled protocol versions are passed as a comma-separated parameter.
                String sslEnabledProtocols = sslEnabledProtocolsValueList.stream()
                        .collect(Collectors.joining(",", "", ""));
                Parameter clientProtocols = new Parameter(ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS, sslEnabledProtocols);
                clientParams.add(clientProtocols);
            }
            String sslProtocol = protocols.getStringValue(SSL_PROTOCOL_VERSION);
            if (StringUtils.isNotBlank(sslProtocol)) {
                sslConfiguration.setSSLProtocol(sslProtocol);
            }
        }
        if (validateCert != null) {
            boolean validateCertEnabled = validateCert.getBooleanValue(HttpConstants.ENABLE);
            int cacheSize = validateCert.getIntValue(HttpConstants.SSL_CONFIG_CACHE_SIZE).intValue();
            int cacheValidityPeriod = validateCert.getIntValue(HttpConstants.SSL_CONFIG_CACHE_VALIDITY_PERIOD)
                    .intValue();
            sslConfiguration.setValidateCertEnabled(validateCertEnabled);
            // 0 means "use the transport default" for the cache settings.
            if (cacheValidityPeriod != 0) {
                sslConfiguration.setCacheValidityPeriod(cacheValidityPeriod);
            }
            if (cacheSize != 0) {
                sslConfiguration.setCacheSize(cacheSize);
            }
        }
        boolean hostNameVerificationEnabled = secureSocket
                .getBooleanValue(HttpConstants.SSL_CONFIG_HOST_NAME_VERIFICATION_ENABLED);
        boolean ocspStaplingEnabled = secureSocket.getBooleanValue(HttpConstants.ENDPOINT_CONFIG_OCSP_STAPLING);
        sslConfiguration.setOcspStaplingEnabled(ocspStaplingEnabled);
        sslConfiguration.setHostNameVerificationEnabled(hostNameVerificationEnabled);
        sslConfiguration
                .setSslSessionTimeOut((int) secureSocket.getDefaultableIntValue(ENDPOINT_CONFIG_SESSION_TIMEOUT));
        sslConfiguration.setSslHandshakeTimeOut(secureSocket.getDefaultableIntValue(ENDPOINT_CONFIG_HANDSHAKE_TIMEOUT));
        Object[] cipherConfigs = secureSocket.getArrayValue(HttpConstants.SSL_CONFIG_CIPHERS).getStringArray();
        if (cipherConfigs != null) {
            List<Object> ciphersValueList = Arrays.asList(cipherConfigs);
            if (ciphersValueList.size() > 0) {
                // Cipher suites are passed as a comma-separated parameter.
                String ciphers = ciphersValueList.stream().map(Object::toString)
                        .collect(Collectors.joining(",", "", ""));
                Parameter clientCiphers = new Parameter(HttpConstants.CIPHERS, ciphers);
                clientParams.add(clientCiphers);
            }
        }
        String enableSessionCreation = String.valueOf(secureSocket
                .getBooleanValue(HttpConstants.SSL_CONFIG_ENABLE_SESSION_CREATION));
        Parameter clientEnableSessionCreation = new Parameter(HttpConstants.SSL_CONFIG_ENABLE_SESSION_CREATION,
                enableSessionCreation);
        clientParams.add(clientEnableSessionCreation);
        if (!clientParams.isEmpty()) {
            sslConfiguration.setParameters(clientParams);
        }
    }
public static String sanitizeBasePath(String basePath) {
basePath = basePath.trim();
if (!basePath.startsWith(HttpConstants.DEFAULT_BASE_PATH)) {
basePath = HttpConstants.DEFAULT_BASE_PATH.concat(basePath);
}
if ((basePath.endsWith(HttpConstants.DEFAULT_BASE_PATH) && basePath.length() != 1)) {
basePath = basePath.substring(0, basePath.length() - 1);
}
if (basePath.endsWith("*")) {
basePath = basePath.substring(0, basePath.length() - 1);
}
return basePath;
}
    /**
     * Serialize outbound message.
     *
     * @param outboundMessageSource Represent the outbound message datasource
     * @param entity                Represent the entity of the outbound message
     * @param messageOutputStream   Represent the output stream
     * @throws IOException In case an error occurs while writing to output stream
     */
    public static void serializeDataSource(Object outboundMessageSource, ObjectValue entity,
                                           OutputStream messageOutputStream) throws IOException {
        // Content negotiated as JSON is streamed through the JSON generator; everything else
        // goes through the type-based serializer.
        if (MimeUtil.generateAsJSON(outboundMessageSource, entity)) {
            JSONGenerator gen = new JSONGenerator(messageOutputStream);
            gen.serialize(outboundMessageSource);
            gen.flush();
        } else {
            serialize(outboundMessageSource, messageOutputStream);
        }
    }
    /**
     * Writes the given Ballerina value to the output stream, dispatching on its runtime type.
     *
     * @param value        value to serialize; must not be null
     * @param outputStream destination stream (not closed by this method)
     * @throws IOException when writing to the stream fails
     */
    public static void serialize(Object value, OutputStream outputStream) throws IOException {
        if (value == null) {
            throw createHttpError("error occurred while serializing null data");
        } else if (value instanceof ArrayValue) {
            // StreamingJsonValue is handled specially within the ArrayValue branch.
            if (value instanceof StreamingJsonValue) {
                ((StreamingJsonValue) value).serialize(outputStream);
            } else {
                ((ArrayValue) value).serialize(outputStream);
            }
        } else if (value instanceof MultipartDataSource) {
            ((MultipartDataSource) value).serialize(outputStream);
        } else if (value instanceof XMLItem) {
            ((XMLItem) value).serialize(outputStream);
        } else if (value instanceof XMLSequence) {
            ((XMLSequence) value).serialize(outputStream);
        } else if (value instanceof Long || value instanceof String ||
                value instanceof Double || value instanceof Integer || value instanceof Boolean) {
            // Simple values are written as their string form in the platform default charset.
            outputStream.write(value.toString().getBytes(Charset.defaultCharset()));
        } else {
            // Fallback: any other Ballerina reference value serializes itself.
            ((RefValue) value).serialize(outputStream);
        }
    }
    /**
     * Check the availability of an annotation.
     *
     * @param configAnnotation Represent the annotation
     * @return True if the annotation is available
     */
    public static boolean checkConfigAnnotationAvailability(MapValue configAnnotation) {
        return configAnnotation != null;
    }
    /**
     * Returns Listener configuration instance populated with endpoint config.
     *
     * @param port           listener port.
     * @param endpointConfig listener endpoint configuration.
     * @return transport listener configuration instance.
     */
    public static ListenerConfiguration getListenerConfig(long port, MapValue endpointConfig) {
        String host = endpointConfig.getStringValue(HttpConstants.ENDPOINT_CONFIG_HOST);
        MapValue sslConfig = endpointConfig.getMapValue(HttpConstants.ENDPOINT_CONFIG_SECURE_SOCKET);
        String httpVersion = endpointConfig.getStringValue(HttpConstants.ENDPOINT_CONFIG_VERSION);
        long idleTimeout = endpointConfig.getIntValue(HttpConstants.ENDPOINT_CONFIG_TIMEOUT);
        ListenerConfiguration listenerConfiguration = new ListenerConfiguration();
        if (HTTP_1_1_VERSION.equals(httpVersion)) {
            // Pipelining and keep-alive are HTTP/1.1-only settings.
            MapValue<String, Object> http1Settings =
                    (MapValue<String, Object>) endpointConfig.get(HttpConstants.HTTP1_SETTINGS);
            listenerConfiguration.setPipeliningLimit(http1Settings.getIntValue(HttpConstants.PIPELINING_REQUEST_LIMIT));
            String keepAlive = http1Settings.getStringValue(HttpConstants.ENDPOINT_CONFIG_KEEP_ALIVE);
            listenerConfiguration.setKeepAliveConfig(HttpUtil.getKeepAliveConfig(keepAlive));
        }
        MapValue<String, Object> requestLimits =
                (MapValue<String, Object>) endpointConfig.getMapValue(HttpConstants.REQUEST_LIMITS);
        setInboundMgsSizeValidationConfig(requestLimits.getIntValue(HttpConstants.MAX_URI_LENGTH),
                requestLimits.getIntValue(HttpConstants.MAX_HEADER_SIZE),
                requestLimits.getIntValue(HttpConstants.MAX_ENTITY_BODY_SIZE),
                listenerConfiguration.getMsgSizeValidationConfig());
        if (host == null || host.trim().isEmpty()) {
            // Fall back to the globally configured host, then the built-in default.
            listenerConfiguration.setHost(ConfigRegistry.getInstance().getConfigOrDefault("b7a.http.host",
                    HttpConstants.HTTP_DEFAULT_HOST));
        } else {
            listenerConfiguration.setHost(host);
        }
        if (port == 0) {
            throw new BallerinaConnectorException("Listener port is not defined!");
        }
        listenerConfiguration.setPort(Math.toIntExact(port));
        if (idleTimeout < 0) {
            throw new BallerinaConnectorException("Idle timeout cannot be negative. If you want to disable the " +
                    "timeout please use value 0");
        }
        listenerConfiguration.setSocketIdleTimeout(Math.toIntExact(idleTimeout));
        if (httpVersion != null) {
            listenerConfiguration.setVersion(httpVersion);
        }
        if (endpointConfig.getType().getName().equalsIgnoreCase(LISTENER_CONFIGURATION)) {
            String serverName = endpointConfig.getStringValue(SERVER_NAME);
            listenerConfiguration.setServerHeader(serverName != null ? serverName : getServerName());
        } else {
            listenerConfiguration.setServerHeader(getServerName());
        }
        if (sslConfig != null) {
            // Secure listeners are fully configured (and returned) by setSslConfig.
            return setSslConfig(sslConfig, listenerConfiguration);
        }
        listenerConfiguration.setPipeliningEnabled(true);
        Object webSocketCompressionEnabled = endpointConfig.get(WebSocketConstants.COMPRESSION_ENABLED_CONFIG);
        if (webSocketCompressionEnabled != null) {
            listenerConfiguration.setWebSocketCompressionEnabled((Boolean) webSocketCompressionEnabled);
        }
        return listenerConfiguration;
    }
public static void setInboundMgsSizeValidationConfig(long maxInitialLineLength, long maxHeaderSize,
long maxEntityBodySize,
InboundMsgSizeValidationConfig sizeValidationConfig) {
if (maxInitialLineLength >= 0) {
sizeValidationConfig.setMaxInitialLineLength(Math.toIntExact(maxInitialLineLength));
} else {
throw new BallerinaConnectorException(
"Invalid configuration found for max initial line length : " + maxInitialLineLength);
}
if (maxHeaderSize >= 0) {
sizeValidationConfig.setMaxHeaderSize(Math.toIntExact(maxHeaderSize));
} else {
throw new BallerinaConnectorException("Invalid configuration found for maxHeaderSize : " + maxHeaderSize);
}
if (maxEntityBodySize != -1) {
if (maxEntityBodySize >= 0) {
sizeValidationConfig.setMaxEntityBodySize(maxEntityBodySize);
} else {
throw new BallerinaConnectorException(
"Invalid configuration found for maxEntityBodySize : " + maxEntityBodySize);
}
}
}
private static String getServerName() {
String userAgent;
String version = System.getProperty(BALLERINA_VERSION);
if (version != null) {
userAgent = "ballerina/" + version;
} else {
userAgent = "ballerina";
}
return userAgent;
}
    /**
     * Configures the listener for HTTPS from the secureSocket record: key material (keystore or
     * key/cert files), trust material (truststore or trust certs), protocol versions, ciphers,
     * certificate validation, OCSP stapling and session settings.
     *
     * @param sslConfig             secureSocket configuration record
     * @param listenerConfiguration listener configuration to mutate
     * @return the same listener configuration, fully SSL-configured
     */
    private static ListenerConfiguration setSslConfig(MapValue sslConfig, ListenerConfiguration listenerConfiguration) {
        listenerConfiguration.setScheme(PROTOCOL_HTTPS);
        MapValue trustStore = sslConfig.getMapValue(ENDPOINT_CONFIG_TRUST_STORE);
        MapValue keyStore = sslConfig.getMapValue(ENDPOINT_CONFIG_KEY_STORE);
        MapValue protocols = sslConfig.getMapValue(ENDPOINT_CONFIG_PROTOCOLS);
        MapValue validateCert = sslConfig.getMapValue(ENDPOINT_CONFIG_VALIDATE_CERT);
        MapValue ocspStapling = sslConfig.getMapValue(ENDPOINT_CONFIG_OCSP_STAPLING);
        String keyFile = sslConfig.getStringValue(ENDPOINT_CONFIG_KEY);
        String certFile = sslConfig.getStringValue(ENDPOINT_CONFIG_CERTIFICATE);
        String trustCerts = sslConfig.getStringValue(ENDPOINT_CONFIG_TRUST_CERTIFICATES);
        String keyPassword = sslConfig.getStringValue(ENDPOINT_CONFIG_KEY_PASSWORD);
        // Key material: exactly one of keystore or key/cert file pair must be given.
        if (keyStore != null && StringUtils.isNotBlank(keyFile)) {
            throw createHttpError("Cannot configure both keyStore and keyFile at the same time.",
                    HttpErrorType.SSL_ERROR);
        } else if (keyStore == null && (StringUtils.isBlank(keyFile) || StringUtils.isBlank(certFile))) {
            throw createHttpError("Either keystore or certificateKey and server certificates must be provided "
                    + "for secure connection", HttpErrorType.SSL_ERROR);
        }
        if (keyStore != null) {
            String keyStoreFile = keyStore.getStringValue(FILE_PATH);
            if (StringUtils.isBlank(keyStoreFile)) {
                throw createHttpError("Keystore file location must be provided for secure connection.",
                        HttpErrorType.SSL_ERROR);
            }
            String keyStorePassword = keyStore.getStringValue(PASSWORD);
            if (StringUtils.isBlank(keyStorePassword)) {
                throw createHttpError("Keystore password must be provided for secure connection",
                        HttpErrorType.SSL_ERROR);
            }
            listenerConfiguration.setKeyStoreFile(keyStoreFile);
            listenerConfiguration.setKeyStorePass(keyStorePassword);
        } else {
            listenerConfiguration.setServerKeyFile(keyFile);
            listenerConfiguration.setServerCertificates(certFile);
            if (StringUtils.isNotBlank(keyPassword)) {
                listenerConfiguration.setServerKeyPassword(keyPassword);
            }
        }
        String sslVerifyClient = sslConfig.getStringValue(SSL_CONFIG_SSL_VERIFY_CLIENT);
        listenerConfiguration.setVerifyClient(sslVerifyClient);
        listenerConfiguration
                .setSslSessionTimeOut((int) sslConfig.getDefaultableIntValue(ENDPOINT_CONFIG_SESSION_TIMEOUT));
        listenerConfiguration
                .setSslHandshakeTimeOut(sslConfig.getDefaultableIntValue(ENDPOINT_CONFIG_HANDSHAKE_TIMEOUT));
        // Mutual SSL requires trust material whenever client verification is requested.
        if (trustStore == null && StringUtils.isNotBlank(sslVerifyClient) && StringUtils.isBlank(trustCerts)) {
            throw createHttpError("Truststore location or trustCertificates must be provided to enable Mutual SSL",
                    HttpErrorType.SSL_ERROR);
        }
        if (trustStore != null) {
            String trustStoreFile = trustStore.getStringValue(FILE_PATH);
            String trustStorePassword = trustStore.getStringValue(PASSWORD);
            if (StringUtils.isBlank(trustStoreFile) && StringUtils.isNotBlank(sslVerifyClient)) {
                throw createHttpError("Truststore location must be provided to enable Mutual SSL",
                        HttpErrorType.SSL_ERROR);
            }
            if (StringUtils.isBlank(trustStorePassword) && StringUtils.isNotBlank(sslVerifyClient)) {
                throw createHttpError("Truststore password value must be provided to enable Mutual SSL",
                        HttpErrorType.SSL_ERROR);
            }
            listenerConfiguration.setTrustStoreFile(trustStoreFile);
            listenerConfiguration.setTrustStorePass(trustStorePassword);
        } else if (StringUtils.isNotBlank(trustCerts)) {
            listenerConfiguration.setServerTrustCertificates(trustCerts);
        }
        List<Parameter> serverParamList = new ArrayList<>();
        Parameter serverParameters;
        if (protocols != null) {
            List<String> sslEnabledProtocolsValueList = Arrays.asList(
                    protocols.getArrayValue(ENABLED_PROTOCOLS).getStringArray());
            if (!sslEnabledProtocolsValueList.isEmpty()) {
                // Enabled protocol versions are passed as a comma-separated parameter.
                String sslEnabledProtocols = sslEnabledProtocolsValueList.stream()
                        .collect(Collectors.joining(",", "", ""));
                serverParameters = new Parameter(ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS, sslEnabledProtocols);
                serverParamList.add(serverParameters);
            }
            String sslProtocol = protocols.getStringValue(SSL_PROTOCOL_VERSION);
            if (StringUtils.isNotBlank(sslProtocol)) {
                listenerConfiguration.setSSLProtocol(sslProtocol);
            }
        }
        List<String> ciphersValueList = Arrays.asList(
                sslConfig.getArrayValue(HttpConstants.SSL_CONFIG_CIPHERS).getStringArray());
        if (!ciphersValueList.isEmpty()) {
            String ciphers = ciphersValueList.stream().collect(Collectors.joining(",", "", ""));
            serverParameters = new Parameter(HttpConstants.CIPHERS, ciphers);
            serverParamList.add(serverParameters);
        }
        if (validateCert != null) {
            boolean validateCertificateEnabled = validateCert.getBooleanValue(HttpConstants.ENABLE);
            long cacheSize = validateCert.getIntValue(HttpConstants.SSL_CONFIG_CACHE_SIZE);
            long cacheValidationPeriod = validateCert.getIntValue(HttpConstants.SSL_CONFIG_CACHE_VALIDITY_PERIOD);
            listenerConfiguration.setValidateCertEnabled(validateCertificateEnabled);
            if (validateCertificateEnabled) {
                // 0 means "use the transport default" for the cache settings.
                if (cacheSize != 0) {
                    listenerConfiguration.setCacheSize(Math.toIntExact(cacheSize));
                }
                if (cacheValidationPeriod != 0) {
                    listenerConfiguration.setCacheValidityPeriod(Math.toIntExact(cacheValidationPeriod));
                }
            }
        }
        if (ocspStapling != null) {
            boolean ocspStaplingEnabled = ocspStapling.getBooleanValue(HttpConstants.ENABLE);
            listenerConfiguration.setOcspStaplingEnabled(ocspStaplingEnabled);
            long cacheSize = ocspStapling.getIntValue(HttpConstants.SSL_CONFIG_CACHE_SIZE);
            long cacheValidationPeriod = ocspStapling.getIntValue(HttpConstants.SSL_CONFIG_CACHE_VALIDITY_PERIOD);
            // NOTE(review): this overwrites the validateCertEnabled flag set in the
            // validateCert block above — confirm that is intentional.
            listenerConfiguration.setValidateCertEnabled(ocspStaplingEnabled);
            if (ocspStaplingEnabled) {
                if (cacheSize != 0) {
                    listenerConfiguration.setCacheSize(Math.toIntExact(cacheSize));
                }
                if (cacheValidationPeriod != 0) {
                    listenerConfiguration.setCacheValidityPeriod(Math.toIntExact(cacheValidationPeriod));
                }
            }
        }
        listenerConfiguration.setTLSStoreType(PKCS_STORE_TYPE);
        String serverEnableSessionCreation = String
                .valueOf(sslConfig.getBooleanValue(SSL_CONFIG_ENABLE_SESSION_CREATION));
        Parameter enableSessionCreationParam = new Parameter(SSL_CONFIG_ENABLE_SESSION_CREATION,
                serverEnableSessionCreation);
        serverParamList.add(enableSessionCreationParam);
        if (!serverParamList.isEmpty()) {
            listenerConfiguration.setParameters(serverParamList);
        }
        listenerConfiguration
                .setId(HttpUtil.getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort()));
        return listenerConfiguration;
    }
/**
 * Extracts the user-declared service name from the generated runtime type name of a service.
 * The runtime appends a {@code $$service$<n>} suffix to the declared name; this strips it off.
 *
 * @param balService the ballerina service object
 * @return the declared service name, or the full type name when no generated suffix is present
 */
public static String getServiceName(ObjectValue balService) {
    String serviceTypeName = balService.getType().getName();
    int serviceIndex = serviceTypeName.lastIndexOf("$$service$");
    if (serviceIndex < 0) {
        // No generated suffix: return the name as-is instead of letting
        // substring(0, -1) throw StringIndexOutOfBoundsException.
        return serviceTypeName;
    }
    return serviceTypeName.substring(0, serviceIndex);
}
/**
 * Creates an HTTP error value whose reason is constrained to a finite type containing only
 * the given reason string, with the standard HTTP error detail record as its detail type.
 *
 * @param reason     the single value permitted in the error's finite reason type
 * @param errorName  the name of the constructed error type
 * @param reasonType the reason string set on the created error value
 * @param errorMsg   the error message placed in the detail record
 * @return the constructed error value
 */
public static ErrorValue createHttpError(String reason, String errorName, String reasonType, String errorMsg) {
BType detailType = BValueCreator.createRecordValue(new BPackage(PACKAGE, MODULE), HTTP_ERROR_DETAIL_RECORD)
.getType();
int mask = TypeFlags.asMask(TypeFlags.ANYDATA, TypeFlags.PURETYPE);
// The reason type is a finite type whose value space holds exactly the supplied reason.
Set<Object> valueSpace = new HashSet<>();
valueSpace.add(reason);
return BallerinaErrors.createError(
new BErrorType(errorName, new BPackage(PACKAGE, MODULE, HTTP_MODULE_VERSION),
new BFiniteType(REASON_RECORD, valueSpace, mask), detailType),
reasonType, errorMsg);
}
// Utility class: private constructor prevents instantiation.
private HttpUtil() {
}
} | class HttpUtil {
// NOTE(review): primitive boolean constants mirroring literals; presumably kept for
// interop/readability at call sites — confirm before removing.
public static final boolean TRUE = true;
public static final boolean FALSE = false;
private static final Logger log = LoggerFactory.getLogger(HttpUtil.class);
// NOTE(review): native-data key; no usage visible in this chunk — confirm it is still referenced.
private static final String METHOD_ACCESSED = "isMethodAccessed";
// Fallback error message used when a throwable carries no message (see getError(Throwable)).
private static final String IO_EXCEPTION_OCCURRED = "I/O exception occurred";
// NOTE(review): config key; no usage visible in this chunk — confirm it is still referenced.
private static final String CHUNKING_CONFIG = "chunking_config";
/**
 * Creates a fresh entity object and attaches it to the given request/response struct.
 * The entity is wired to the headers and trailer headers of the underlying transport
 * message (creating a default transport message if none exists yet).
 *
 * @param httpMessageStruct request/response struct.
 * @return created entity.
 */
public static ObjectValue createNewEntity(ObjectValue httpMessageStruct) {
ObjectValue entity = ValueCreatorUtils.createEntityObject();
HttpCarbonMessage httpCarbonMessage = HttpUtil.getCarbonMsg(httpMessageStruct,
HttpUtil.createHttpCarbonMessage(isRequest(httpMessageStruct)));
// Share the transport message's header maps with the entity rather than copying them.
entity.addNativeData(ENTITY_HEADERS, httpCarbonMessage.getHeaders());
entity.addNativeData(ENTITY_TRAILER_HEADERS, httpCarbonMessage.getTrailerHeaders());
entity.addNativeData(ENTITY_BYTE_CHANNEL, null);
httpMessageStruct.set(isRequest(httpMessageStruct) ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD
, entity);
// A brand-new entity has no body channel attached yet.
httpMessageStruct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
return entity;
}
/**
 * Set the given entity to request or response message.
 *
 * @param messageObj Represent ballerina request/response
 * @param entityObj  Represent an entity
 * @param isRequest  boolean representing whether the message is a request or a response
 */
public static void setEntity(ObjectValue messageObj, ObjectValue entityObj, boolean isRequest) {
HttpCarbonMessage httpCarbonMessage = HttpUtil.getCarbonMsg(messageObj,
HttpUtil.createHttpCarbonMessage(isRequest));
String contentType = MimeUtil.getContentTypeWithParameters(entityObj);
if (checkEntityBodyAvailability(entityObj)) {
// Release any previously buffered entities before the new body takes over,
// then ensure a content type is set (defaulting to application/octet-stream).
httpCarbonMessage.waitAndReleaseAllEntities();
if (contentType == null) {
contentType = OCTET_STREAM;
}
HeaderUtil.setHeaderToEntity(entityObj, HttpHeaderNames.CONTENT_TYPE.toString(), contentType);
}
messageObj.set(isRequest ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD, entityObj);
// Record whether this entity already carries a body so later reads skip repopulation.
messageObj.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, checkEntityBodyAvailability(entityObj));
}
/**
 * Returns the entity attached to a request or response, populating its body content
 * first when required and not already done.
 *
 * @param messageObj         ballerina request/response object
 * @param isRequest          true for a request, false for a response
 * @param entityBodyRequired whether the entity body must be populated before returning
 * @return the entity of the request or response
 */
public static ObjectValue getEntity(ObjectValue messageObj, boolean isRequest, boolean entityBodyRequired) {
    ObjectValue entity = (ObjectValue) messageObj.get(isRequest ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
    Object channelFlag = messageObj.getNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET);
    boolean bodyAlreadySet = channelFlag != null && (Boolean) channelFlag;
    if (entityBodyRequired && !bodyAlreadySet) {
        populateEntityBody(messageObj, entity, isRequest, false);
    }
    return entity;
}
/**
 * Populate entity with the relevant body content.
 *
 * Multipart payloads are decoded eagerly (unless streaming); otherwise the transport
 * message or a byte channel over its input stream is attached for lazy consumption.
 *
 * @param messageObj Represent ballerina request/response
 * @param entityObj  Represent an entity
 * @param request    boolean representing whether the message is a request or a response
 * @param streaming  boolean representing whether the entity requires byte channel or message as native data
 */
public static void populateEntityBody(ObjectValue messageObj, ObjectValue entityObj, boolean request,
boolean streaming) {
HttpCarbonMessage httpCarbonMessage = HttpUtil
.getCarbonMsg(messageObj, HttpUtil.createHttpCarbonMessage(request));
String contentType = httpCarbonMessage.getHeader(HttpHeaderNames.CONTENT_TYPE.toString());
if (MimeUtil.isNotNullAndEmpty(contentType) && contentType.startsWith(MULTIPART_AS_PRIMARY_TYPE)
&& !streaming) {
// Multipart bodies are parsed up front into body parts.
MultipartDecoder.parseBody(entityObj, contentType,
new HttpMessageDataStreamer(httpCarbonMessage).getInputStream());
} else {
long contentLength = MimeUtil.extractContentLength(httpCarbonMessage);
if (contentLength > 0) {
if (streaming) {
// Streaming mode: wrap the transport input stream in an entity byte channel.
entityObj.addNativeData(ENTITY_BYTE_CHANNEL, new EntityWrapper(
new EntityBodyChannel(new HttpMessageDataStreamer(httpCarbonMessage).getInputStream())));
} else {
entityObj.addNativeData(TRANSPORT_MESSAGE, httpCarbonMessage);
}
} else {
// No content length: only chunked transfers can still carry a body.
if (HttpHeaderValues.CHUNKED.toString().equals(
httpCarbonMessage.getHeader(HttpHeaderNames.TRANSFER_ENCODING.toString()))) {
entityObj.addNativeData(TRANSPORT_MESSAGE, httpCarbonMessage);
}
}
}
messageObj.set(request ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD, entityObj);
messageObj.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, true);
}
/**
 * Returns the entity of the given message, or {@code null} when no body has been
 * attached to it yet (the body-set flag is absent or false).
 *
 * @param request the ballerina request/response object
 * @return the entity, or null when no body is available
 */
public static ObjectValue extractEntity(ObjectValue request) {
    Object bodySet = request.getNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET);
    if (bodySet instanceof Boolean && (Boolean) bodySet) {
        return (ObjectValue) request.get(isRequest(request) ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
    }
    return null;
}
/**
 * Closes the given message output stream, logging (not propagating) any I/O failure.
 * A null stream is silently ignored.
 *
 * @param messageOutputStream the stream to close; may be null
 */
public static void closeMessageOutputStream(OutputStream messageOutputStream) {
    if (messageOutputStream == null) {
        return;
    }
    try {
        messageOutputStream.close();
    } catch (IOException e) {
        log.error("Couldn't close message output stream", e);
    }
}
/**
 * Prepares an outbound response before it is written to the wire: ensures an entity
 * exists, applies CORS headers, copies struct headers/properties onto the transport
 * message, and applies the service's compression and chunking configuration.
 *
 * @param connectionObj       the caller/connection object carrying the owning service
 * @param inboundRequestMsg   the originating inbound request
 * @param outboundResponseMsg the transport-level response being prepared
 * @param outboundResponseObj the ballerina response object
 */
public static void prepareOutboundResponse(ObjectValue connectionObj, HttpCarbonMessage inboundRequestMsg,
HttpCarbonMessage outboundResponseMsg,
ObjectValue outboundResponseObj) {
HttpUtil.checkEntityAvailability(outboundResponseObj);
HttpUtil.addCorsHeaders(inboundRequestMsg, outboundResponseMsg);
HttpUtil.enrichOutboundMessage(outboundResponseMsg, outboundResponseObj);
HttpService httpService = (HttpService) connectionObj.getNativeData(HttpConstants.HTTP_SERVICE);
HttpUtil.setCompressionHeaders(httpService.getCompressionConfig(), inboundRequestMsg, outboundResponseMsg);
HttpUtil.setChunkingHeader(httpService.getChunkingConfig(), outboundResponseMsg);
}
// Applies CORS response headers, but only when the inbound request carries an Origin header.
private static void addCorsHeaders(HttpCarbonMessage requestMsg, HttpCarbonMessage responseMsg) {
    String originHeader = requestMsg.getHeader(HttpHeaderNames.ORIGIN.toString());
    if (originHeader != null) {
        CorsHeaderGenerator.process(requestMsg, responseMsg, true);
    }
}
/**
 * This method should never be called directly to send out responses for ballerina HTTP 1.1. Use
 * PipeliningHandler's sendPipelinedResponse() method instead.
 *
 * @param requestMsg  Represent the request message
 * @param responseMsg Represent the corresponding response
 * @return HttpResponseFuture that represent the future results
 */
public static HttpResponseFuture sendOutboundResponse(HttpCarbonMessage requestMsg,
                                                      HttpCarbonMessage responseMsg) {
    try {
        return requestMsg.respond(responseMsg);
    } catch (ServerConnectorException e) {
        // Surface transport failures as connector exceptions with the cause preserved.
        throw new BallerinaConnectorException("Error occurred during response", e);
    }
}
/**
 * Sends an HTTP/2 Server Push message back to the client.
 *
 * @param requestMsg   the request message associated to the server push response
 * @param pushResponse the server push message
 * @param pushPromise  the push promise associated with the server push
 * @return the future to get notifications of the operation asynchronously
 */
public static HttpResponseFuture pushResponse(HttpCarbonMessage requestMsg, HttpCarbonMessage pushResponse,
                                              Http2PushPromise pushPromise) {
    try {
        return requestMsg.pushResponse(pushResponse, pushPromise);
    } catch (ServerConnectorException e) {
        throw new BallerinaConnectorException("Error occurred while sending a server push message", e);
    }
}
/**
 * Sends an HTTP/2 Push Promise message back to the client.
 *
 * @param requestMsg  the request message associated to the push promise
 * @param pushPromise the push promise message
 * @return the future to get notifications of the operation asynchronously
 */
public static HttpResponseFuture pushPromise(HttpCarbonMessage requestMsg, Http2PushPromise pushPromise) {
    try {
        return requestMsg.pushPromise(pushPromise);
    } catch (ServerConnectorException e) {
        throw new BallerinaConnectorException("Error occurred during response", e);
    }
}
// Converts a connector exception into an error response and sends it down the pipeline.
public static void handleFailure(HttpCarbonMessage requestMessage, BallerinaConnectorException ex) {
    String message = ex.getMessage();
    sendPipelinedResponse(requestMessage, createErrorMessage(message, getStatusCode(requestMessage, message)));
}
// Converts a ballerina error value into an error response, printing its stack trace first.
static void handleFailure(HttpCarbonMessage requestMessage, ErrorValue error) {
    String message = getErrorMessage(error);
    int status = getStatusCode(requestMessage, message);
    ErrorHandlerUtils.printError("error: " + error.getPrintableStackTrace());
    sendPipelinedResponse(requestMessage, createErrorMessage(message, status));
}
// Prefers the detail record's message; falls back to the error reason when details are empty.
private static String getErrorMessage(ErrorValue error) {
    MapValue details = (MapValue) error.getDetails();
    return details.isEmpty() ? error.getReason() : details.get(HTTP_ERROR_MESSAGE).toString();
}
// Returns the status code already set on the transport message, or logs the error
// and falls back to 500 when none has been set.
private static int getStatusCode(HttpCarbonMessage requestMessage, String errorMsg) {
    Integer statusCode = requestMessage.getHttpStatusCode();
    if (statusCode != null) {
        return statusCode;
    }
    log.error(errorMsg);
    return HttpResponseStatus.INTERNAL_SERVER_ERROR.code();
}
// Lower-cases only the first character of the payload; empty strings pass through unchanged.
private static String lowerCaseTheFirstLetter(String payload) {
    if (payload.isEmpty()) {
        return payload;
    }
    return Character.toLowerCase(payload.charAt(0)) + payload.substring(1);
}
// Stamps the response with the given status code and a text/plain content type.
private static void setHttpStatusCodes(int statusCode, HttpCarbonMessage response) {
    response.setHttpStatusCode(statusCode);
    response.getHeaders().set(HttpHeaderNames.CONTENT_TYPE,
            org.wso2.transport.http.netty.contract.Constants.TEXT_PLAIN);
}
/**
 * Get HTTP error value with a given error detail.
 *
 * @param errMsg Error message
 * @return Error value
 */
public static ErrorValue getError(String errMsg) {
MapValue<String, Object> httpErrorRecord = createHttpErrorDetailRecord(errMsg, null);
// NOTE(review): the message is written twice — once inside createHttpErrorDetailRecord and
// once via this put; presumably to guarantee the key is present — confirm and deduplicate.
httpErrorRecord.put(HTTP_ERROR_MESSAGE, errMsg);
return BallerinaErrors.createError(HTTP_ERROR_CODE, httpErrorRecord);
}
/**
 * Get error value from throwable.
 *
 * @param throwable Throwable representing the error.
 * @return Error struct
 */
public static ErrorValue getError(Throwable throwable) {
    if (throwable instanceof ClientConnectorException) {
        return createHttpError(throwable);
    }
    // Messageless throwables get a generic I/O failure description.
    String message = throwable.getMessage();
    return createHttpError(message == null ? IO_EXCEPTION_OCCURRED : message);
}
// Derives the error type from the message text and builds the error value in one step.
public static ErrorValue createHttpError(String errorMessage) {
    return createHttpError(errorMessage, getErrorType(errorMessage));
}
/**
 * Maps a transport-level throwable onto the matching HTTP error type. Timeout, SSL and
 * HTTP/2 promise failures map directly; connection failures are wrapped with an I/O cause;
 * everything else becomes a generic client error.
 *
 * @param throwable the transport failure
 * @return the corresponding error value
 */
public static ErrorValue createHttpError(Throwable throwable) {
ErrorValue cause;
if (throwable instanceof EndpointTimeOutException) {
return createHttpError(throwable.getMessage(), HttpErrorType.IDLE_TIMEOUT_TRIGGERED);
} else if (throwable instanceof SslException) {
return createHttpError(throwable.getMessage(), HttpErrorType.SSL_ERROR);
} else if (throwable instanceof PromiseRejectedException) {
return createHttpError(throwable.getMessage(), HttpErrorType.HTTP2_CLIENT_ERROR);
} else if (throwable instanceof ConnectionTimedOutException) {
// Connection-level failures carry the original message as a nested I/O error cause.
cause = createErrorCause(throwable.getMessage(),
IOConstants.ErrorCode.ConnectionTimedOut.errorCode(),
IO_PACKAGE_ID, DETAIL_RECORD_TYPE_NAME);
return createHttpError("Something wrong with the connection", HttpErrorType.GENERIC_CLIENT_ERROR, cause);
} else if (throwable instanceof ClientConnectorException) {
cause = createErrorCause(throwable.getMessage(),
IOConstants.ErrorCode.GenericError.errorCode(),
IO_PACKAGE_ID, DETAIL_RECORD_TYPE_NAME);
return createHttpError("Something wrong with the connection", HttpErrorType.GENERIC_CLIENT_ERROR, cause);
} else {
return createHttpError(throwable.getMessage(), HttpErrorType.GENERIC_CLIENT_ERROR);
}
}
// Builds an HTTP error value of the given type whose detail record carries the message.
public static ErrorValue createHttpError(String message, HttpErrorType errorType) {
    Map<String, Object> detailFields = new HashMap<>();
    detailFields.put(BallerinaErrors.ERROR_MESSAGE_FIELD, message);
    MapValue<String, Object> detailRecord =
            BallerinaValues.createRecordValue(PROTOCOL_HTTP_PKG_ID, HTTP_ERROR_DETAIL_RECORD, detailFields);
    return BallerinaErrors.createError(errorType.getReason(), detailRecord);
}
// Builds an HTTP error value of the given type with both a message and a nested cause.
public static ErrorValue createHttpError(String message, HttpErrorType errorType, ErrorValue cause) {
    return BallerinaErrors.createError(errorType.getReason(), createHttpErrorDetailRecord(message, cause));
}
// Creates the standard HTTP error detail record, including the cause only when present.
private static MapValue<String, Object> createHttpErrorDetailRecord(String message, ErrorValue cause) {
    MapValue<String, Object> detail =
            BallerinaValues.createRecordValue(PROTOCOL_HTTP_PKG_ID, HTTP_ERROR_DETAIL_RECORD);
    if (cause == null) {
        return BallerinaValues.createRecord(detail, message);
    }
    return BallerinaValues.createRecord(detail, message, cause);
}
/**
 * Maps a transport error message string onto the corresponding {@link HttpErrorType}.
 * Idle-timeout messages are matched by substring; all other messages must match a known
 * transport constant exactly, otherwise a generic client error is returned.
 *
 * @param errorMessage the transport error message
 * @return the matching error type
 */
private static HttpErrorType getErrorType(String errorMessage) {
// Idle timeout messages vary in their suffix, so a substring match is required here.
if (errorMessage.contains("Idle timeout triggered")) {
return HttpErrorType.IDLE_TIMEOUT_TRIGGERED;
}
switch (errorMessage) {
case REMOTE_SERVER_CLOSED_BEFORE_INITIATING_INBOUND_RESPONSE:
return HttpErrorType.INIT_INBOUND_RESPONSE_FAILED;
case REMOTE_SERVER_CLOSED_WHILE_READING_INBOUND_RESPONSE_HEADERS:
return HttpErrorType.READING_INBOUND_RESPONSE_HEADERS_FAILED;
case REMOTE_SERVER_CLOSED_WHILE_READING_INBOUND_RESPONSE_BODY:
return HttpErrorType.READING_INBOUND_RESPONSE_BODY_FAILED;
case REMOTE_SERVER_CLOSED_BEFORE_INITIATING_OUTBOUND_REQUEST:
return HttpErrorType.INIT_OUTBOUND_REQUEST_FAILED;
case REMOTE_SERVER_CLOSED_WHILE_WRITING_OUTBOUND_REQUEST_HEADERS:
return HttpErrorType.WRITING_OUTBOUND_REQUEST_HEADER_FAILED;
case REMOTE_SERVER_CLOSED_WHILE_WRITING_OUTBOUND_REQUEST_BODY:
return HttpErrorType.WRITING_OUTBOUND_REQUEST_BODY_FAILED;
case REMOTE_CLIENT_CLOSED_BEFORE_INITIATING_INBOUND_REQUEST:
return HttpErrorType.INIT_INBOUND_REQUEST_FAILED;
case REMOTE_CLIENT_CLOSED_WHILE_READING_INBOUND_REQUEST_HEADERS:
return HttpErrorType.READING_INBOUND_REQUEST_HEADER_FAILED;
case REMOTE_CLIENT_CLOSED_WHILE_READING_INBOUND_REQUEST_BODY:
return HttpErrorType.READING_INBOUND_REQUEST_BODY_FAILED;
case REMOTE_CLIENT_CLOSED_BEFORE_INITIATING_OUTBOUND_RESPONSE:
return HttpErrorType.INIT_OUTBOUND_RESPONSE_FAILED;
case REMOTE_CLIENT_CLOSED_WHILE_WRITING_OUTBOUND_RESPONSE_HEADERS:
return HttpErrorType.WRITING_OUTBOUND_RESPONSE_HEADERS_FAILED;
case REMOTE_CLIENT_CLOSED_WHILE_WRITING_OUTBOUND_RESPONSE_BODY:
return HttpErrorType.WRITING_OUTBOUND_RESPONSE_BODY_FAILED;
case REMOTE_CLIENT_CLOSED_BEFORE_INITIATING_100_CONTINUE_RESPONSE:
return HttpErrorType.INIT_100_CONTINUE_RESPONSE_FAILED;
case REMOTE_CLIENT_CLOSED_WHILE_WRITING_100_CONTINUE_RESPONSE:
return HttpErrorType.WRITING_100_CONTINUE_RESPONSE_FAILED;
case PROMISED_STREAM_REJECTED_ERROR:
return HttpErrorType.HTTP2_CLIENT_ERROR;
default:
return HttpErrorType.GENERIC_CLIENT_ERROR;
}
}
// Builds a nested cause error from the given package's detail record type.
private static ErrorValue createErrorCause(String message, String reason, BPackage packageName,
                                           String recordName) {
    MapValue<String, Object> detailTemplate = BallerinaValues.createRecordValue(packageName, recordName);
    MapValue<String, Object> populatedDetail = BallerinaValues.createRecord(detailTemplate, message, null);
    return BallerinaErrors.createError(reason, populatedDetail);
}
// Returns the transport message attached to the object; when none exists, attaches
// and returns the supplied default.
public static HttpCarbonMessage getCarbonMsg(ObjectValue objectValue, HttpCarbonMessage defaultMsg) {
    Object nativeMsg = objectValue.getNativeData(TRANSPORT_MESSAGE);
    if (nativeMsg == null) {
        addCarbonMsg(objectValue, defaultMsg);
        return defaultMsg;
    }
    return (HttpCarbonMessage) nativeMsg;
}
/**
 * Gets the {@code Http2PushPromise} represented by the PushPromise object, attaching and
 * returning the supplied default when the object has no native push promise yet.
 *
 * @param pushPromiseObj     the push promise object
 * @param defaultPushPromise the Http2PushPromise to use if the object does not have native data of a push promise
 * @return the {@code Http2PushPromise} represented by the PushPromise object
 */
public static Http2PushPromise getPushPromise(ObjectValue pushPromiseObj, Http2PushPromise defaultPushPromise) {
    Object nativePromise = pushPromiseObj.getNativeData(HttpConstants.TRANSPORT_PUSH_PROMISE);
    if (nativePromise == null) {
        pushPromiseObj.addNativeData(HttpConstants.TRANSPORT_PUSH_PROMISE, defaultPushPromise);
        return defaultPushPromise;
    }
    return (Http2PushPromise) nativePromise;
}
/**
 * Populates the push promise object from native {@code Http2PushPromise}: stores the native
 * promise and mirrors its path and method onto the struct fields.
 *
 * @param pushPromiseObj the push promise object
 * @param pushPromise    the native Http2PushPromise
 */
public static void populatePushPromiseStruct(ObjectValue pushPromiseObj,
Http2PushPromise pushPromise) {
pushPromiseObj.addNativeData(HttpConstants.TRANSPORT_PUSH_PROMISE, pushPromise);
pushPromiseObj.set(HttpConstants.PUSH_PROMISE_PATH_FIELD, pushPromise.getPath());
pushPromiseObj.set(HttpConstants.PUSH_PROMISE_METHOD_FIELD, pushPromise.getMethod());
}
/**
 * Creates native {@code Http2PushPromise} from a PushPromise object, defaulting the
 * method to GET and the path to "/" when the corresponding fields are unset.
 *
 * @param pushPromiseObj the PushPromise object
 * @return the populated native {@code Http2PushPromise}
 */
public static Http2PushPromise createHttpPushPromise(ObjectValue pushPromiseObj) {
    String method = pushPromiseObj.get(HttpConstants.PUSH_PROMISE_METHOD_FIELD).toString();
    if (method == null || method.isEmpty()) {
        method = HttpConstants.HTTP_METHOD_GET;
    }
    String path = pushPromiseObj.get(HttpConstants.PUSH_PROMISE_PATH_FIELD).toString();
    return new Http2PushPromise(method,
            (path == null || path.isEmpty()) ? HttpConstants.DEFAULT_BASE_PATH : path);
}
// Attaches the transport message to the struct as native data under TRANSPORT_MESSAGE.
public static void addCarbonMsg(ObjectValue struct, HttpCarbonMessage httpCarbonMessage) {
struct.addNativeData(TRANSPORT_MESSAGE, httpCarbonMessage);
}
/**
 * Populates the ballerina inbound request object from the transport message: attaches
 * native data, records the mutual-SSL handshake result, copies request-line info and
 * headers, wires up the entity, and parses any Cache-Control header.
 *
 * @param inboundRequest    the ballerina request object being populated
 * @param entity            the entity to attach to the request
 * @param inboundRequestMsg the transport-level inbound request
 */
public static void populateInboundRequest(ObjectValue inboundRequest, ObjectValue entity,
HttpCarbonMessage inboundRequestMsg) {
inboundRequest.addNativeData(TRANSPORT_MESSAGE, inboundRequestMsg);
inboundRequest.addNativeData(REQUEST, true);
// Mutual SSL handshake status/certificate are surfaced through a dedicated record.
MapValue mutualSslRecord = ValueCreatorUtils.createHTTPRecordValue(MUTUAL_SSL_HANDSHAKE_RECORD);
mutualSslRecord.put(REQUEST_MUTUAL_SSL_HANDSHAKE_STATUS,
inboundRequestMsg.getProperty(HttpConstants.MUTUAL_SSL_RESULT));
mutualSslRecord.put(MUTUAL_SSL_CERTIFICATE, inboundRequestMsg.getProperty(HttpConstants.BASE_64_ENCODED_CERT));
inboundRequest.set(REQUEST_MUTUAL_SSL_HANDSHAKE_FIELD, mutualSslRecord);
enrichWithInboundRequestInfo(inboundRequest, inboundRequestMsg);
enrichWithInboundRequestHeaders(inboundRequest, inboundRequestMsg);
populateEntity(entity, inboundRequestMsg);
inboundRequest.set(REQUEST_ENTITY_FIELD, entity);
inboundRequest.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
// Cache-Control is parsed into a structured object only when the header is present.
String cacheControlHeader = inboundRequestMsg.getHeader(CACHE_CONTROL.toString());
if (cacheControlHeader != null) {
ObjectValue cacheControlObj = ValueCreatorUtils.createRequestCacheControlObject();
RequestCacheControlObj requestCacheControl = new RequestCacheControlObj(cacheControlObj);
requestCacheControl.populateStruct(cacheControlHeader);
inboundRequest.set(REQUEST_CACHE_CONTROL_FIELD, requestCacheControl.getObj());
}
}
// Moves the User-Agent header (when present) from the transport headers onto the
// request struct field, removing the raw header so it is not duplicated.
private static void enrichWithInboundRequestHeaders(ObjectValue inboundRequestObj,
                                                    HttpCarbonMessage inboundRequestMsg) {
    String userAgentHeader = HttpHeaderNames.USER_AGENT.toString();
    String agent = inboundRequestMsg.getHeader(userAgentHeader);
    if (agent != null) {
        inboundRequestObj.set(HttpConstants.REQUEST_USER_AGENT_FIELD, agent);
        inboundRequestMsg.removeHeader(userAgentHeader);
    }
}
// Copies request-line information (raw path, method, HTTP version) onto the request
// struct, plus any extra path info captured during resource dispatching.
private static void enrichWithInboundRequestInfo(ObjectValue inboundRequestObj,
HttpCarbonMessage inboundRequestMsg) {
inboundRequestObj.set(HttpConstants.REQUEST_RAW_PATH_FIELD, inboundRequestMsg.getRequestUrl());
inboundRequestObj.set(HttpConstants.REQUEST_METHOD_FIELD, inboundRequestMsg.getHttpMethod());
inboundRequestObj.set(HttpConstants.REQUEST_VERSION_FIELD, inboundRequestMsg.getHttpVersion());
HttpResourceArguments resourceArgValues = (HttpResourceArguments) inboundRequestMsg.getProperty(
HttpConstants.RESOURCE_ARGS);
if (resourceArgValues != null && resourceArgValues.getMap().get(HttpConstants.EXTRA_PATH_INFO) != null) {
inboundRequestObj.set(HttpConstants.REQUEST_EXTRA_PATH_INFO_FIELD,
resourceArgValues.getMap().get(HttpConstants.EXTRA_PATH_INFO));
}
}
/**
 * Populates the HTTP caller with native data: attaches the inbound transport message
 * and stores the endpoint configuration on the caller object.
 *
 * @param caller     Represents the HTTP caller
 * @param inboundMsg Represents carbon message
 * @param config     Represents service endpoint configuration
 */
public static void enrichHttpCallerWithNativeData(ObjectValue caller, HttpCarbonMessage inboundMsg,
MapValue config) {
caller.addNativeData(HttpConstants.TRANSPORT_MESSAGE, inboundMsg);
caller.set(HttpConstants.HTTP_CONNECTOR_CONFIG_FIELD, config);
}
/**
 * Populates the HTTP caller with connection information: remote and local host/port
 * records, protocol, endpoint configuration, and references to the owning service.
 *
 * @param httpCaller   Represents the HTTP caller
 * @param inboundMsg   Represents the carbon message
 * @param httpResource Represents the Http Resource
 * @param config       Represents the service endpoint configuration
 */
public static void enrichHttpCallerWithConnectionInfo(ObjectValue httpCaller, HttpCarbonMessage inboundMsg,
HttpResource httpResource, MapValue config) {
MapValue<String, Object> remote = ValueCreatorUtils.createHTTPRecordValue(HttpConstants.REMOTE);
MapValue<String, Object> local = ValueCreatorUtils.createHTTPRecordValue(HttpConstants.LOCAL);
Object remoteSocketAddress = inboundMsg.getProperty(HttpConstants.REMOTE_ADDRESS);
if (remoteSocketAddress instanceof InetSocketAddress) {
InetSocketAddress inetSocketAddress = (InetSocketAddress) remoteSocketAddress;
// getHostString avoids a reverse-DNS lookup for the remote peer.
String remoteHost = inetSocketAddress.getHostString();
long remotePort = inetSocketAddress.getPort();
remote.put(HttpConstants.REMOTE_HOST_FIELD, remoteHost);
remote.put(HttpConstants.REMOTE_PORT_FIELD, remotePort);
}
httpCaller.set(HttpConstants.REMOTE_STRUCT_FIELD, remote);
Object localSocketAddress = inboundMsg.getProperty(HttpConstants.LOCAL_ADDRESS);
if (localSocketAddress instanceof InetSocketAddress) {
InetSocketAddress inetSocketAddress = (InetSocketAddress) localSocketAddress;
// NOTE(review): getHostName (unlike getHostString above) may trigger a reverse-DNS
// lookup — confirm the asymmetry with the remote side is intentional.
String localHost = inetSocketAddress.getHostName();
long localPort = inetSocketAddress.getPort();
local.put(HttpConstants.LOCAL_HOST_FIELD, localHost);
local.put(HttpConstants.LOCAL_PORT_FIELD, localPort);
}
httpCaller.set(HttpConstants.LOCAL_STRUCT_INDEX, local);
httpCaller.set(HttpConstants.SERVICE_ENDPOINT_PROTOCOL_FIELD, inboundMsg.getProperty(HttpConstants.PROTOCOL));
httpCaller.set(HttpConstants.SERVICE_ENDPOINT_CONFIG_FIELD, config);
httpCaller.addNativeData(HttpConstants.HTTP_SERVICE, httpResource.getParentService());
httpCaller.addNativeData(HttpConstants.REMOTE_SOCKET_ADDRESS, remoteSocketAddress);
}
/**
 * Populate inbound response with headers and entity.
 *
 * Copies status code, reason phrase, Server header, resolved request URI and any
 * Cache-Control directives from the transport message onto the response object.
 *
 * @param inboundResponse    Ballerina struct to represent response
 * @param entity             Entity of the response
 * @param inboundResponseMsg Represent carbon message.
 */
public static void populateInboundResponse(ObjectValue inboundResponse, ObjectValue entity,
HttpCarbonMessage inboundResponseMsg) {
inboundResponse.addNativeData(TRANSPORT_MESSAGE, inboundResponseMsg);
int statusCode = inboundResponseMsg.getHttpStatusCode();
inboundResponse.set(RESPONSE_STATUS_CODE_FIELD, (long) statusCode);
String reasonPhrase = inboundResponseMsg.getReasonPhrase();
inboundResponse.set(RESPONSE_REASON_PHRASE_FIELD, reasonPhrase);
// The Server header is promoted to a struct field and removed from the raw headers.
if (inboundResponseMsg.getHeader(HttpHeaderNames.SERVER.toString()) != null) {
inboundResponse.set(HttpConstants.RESPONSE_SERVER_FIELD,
inboundResponseMsg.getHeader(HttpHeaderNames.SERVER.toString()));
inboundResponseMsg.removeHeader(HttpHeaderNames.SERVER.toString());
}
if (inboundResponseMsg.getProperty(RESOLVED_REQUESTED_URI) != null) {
inboundResponse.set(RESOLVED_REQUESTED_URI_FIELD,
inboundResponseMsg.getProperty(RESOLVED_REQUESTED_URI).toString());
}
String cacheControlHeader = inboundResponseMsg.getHeader(CACHE_CONTROL.toString());
if (cacheControlHeader != null) {
ResponseCacheControlObj responseCacheControl = new ResponseCacheControlObj(PROTOCOL_HTTP_PKG_ID,
RESPONSE_CACHE_CONTROL);
responseCacheControl.populateStruct(cacheControlHeader);
inboundResponse.set(RESPONSE_CACHE_CONTROL_FIELD, responseCacheControl.getObj());
}
populateEntity(entity, inboundResponseMsg);
inboundResponse.set(RESPONSE_ENTITY_FIELD, entity);
inboundResponse.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
}
/**
 * Populate entity with headers, content-type and content-length.
 *
 * @param entity Represent an entity struct
 * @param cMsg   Represent a carbon message
 */
private static void populateEntity(ObjectValue entity, HttpCarbonMessage cMsg) {
// -1 signals an unknown content length (no Content-Length header present).
long contentLength = -1;
String lengthStr = cMsg.getHeader(HttpHeaderNames.CONTENT_LENGTH.toString());
try {
contentLength = lengthStr != null ? Long.parseLong(lengthStr) : contentLength;
MimeUtil.setContentLength(entity, contentLength);
} catch (NumberFormatException e) {
// A non-numeric Content-Length header is a client/protocol error.
throw createHttpError("Invalid content length", HttpErrorType.INVALID_CONTENT_LENGTH);
}
// The entity shares the transport message's header maps rather than copying them.
entity.addNativeData(ENTITY_HEADERS, cMsg.getHeaders());
entity.addNativeData(ENTITY_TRAILER_HEADERS, cMsg.getTrailerHeaders());
}
/**
 * Set headers and properties of request/response object to the outbound transport message.
 *
 * @param outboundMsg    transport Http carbon message.
 * @param outboundMsgObj req/resp object.
 */
public static void enrichOutboundMessage(HttpCarbonMessage outboundMsg, ObjectValue outboundMsgObj) {
setHeadersToTransportMessage(outboundMsg, outboundMsgObj);
setPropertiesToTransportMessage(outboundMsg, outboundMsgObj);
}
// Merges the entity's header map into the outbound transport message and re-points the
// entity at the transport headers so both sides share one map afterwards. Trailer headers
// are merged for responses only.
private static void setHeadersToTransportMessage(HttpCarbonMessage outboundMsg, ObjectValue messageObj) {
boolean request = isRequest(messageObj);
ObjectValue entityObj = (ObjectValue) messageObj
.get(request ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
HttpHeaders transportHeaders = outboundMsg.getHeaders();
if (request || isResponse(messageObj)) {
addRemovedPropertiesBackToHeadersMap(messageObj, transportHeaders);
}
// Identity comparison: when the entity already shares the transport's header map,
// merging would duplicate every header, so it is skipped.
HttpHeaders httpHeaders = (HttpHeaders) entityObj.getNativeData(ENTITY_HEADERS);
if (httpHeaders != transportHeaders) {
if (httpHeaders != null) {
transportHeaders.add(httpHeaders);
}
entityObj.addNativeData(ENTITY_HEADERS, outboundMsg.getHeaders());
}
if (!request) {
HttpHeaders transportTrailingHeaders = outboundMsg.getTrailerHeaders();
HttpHeaders trailingHeaders = (HttpHeaders) entityObj.getNativeData(ENTITY_TRAILER_HEADERS);
if (trailingHeaders != null && trailingHeaders != transportTrailingHeaders) {
transportTrailingHeaders.add(trailingHeaders);
}
}
}
// True when the object's runtime type name identifies it as an HTTP request.
private static boolean isRequest(ObjectValue value) {
    String typeName = value.getType().getName();
    return typeName.equals(REQUEST);
}
// True when the object's runtime type name identifies it as an HTTP response.
private static boolean isResponse(ObjectValue value) {
    String typeName = value.getType().getName();
    return typeName.equals(HttpConstants.RESPONSE);
}
// Restores headers that were promoted to struct fields during inbound processing:
// User-Agent for requests, Server for responses.
private static void addRemovedPropertiesBackToHeadersMap(ObjectValue messageObj, HttpHeaders transportHeaders) {
    boolean request = isRequest(messageObj);
    Object fieldValue = request ? messageObj.get(HttpConstants.REQUEST_USER_AGENT_FIELD)
            : messageObj.get(HttpConstants.RESPONSE_SERVER_FIELD);
    if (fieldValue == null || fieldValue.toString().isEmpty()) {
        return;
    }
    HttpHeaderNames headerName = request ? HttpHeaderNames.USER_AGENT : HttpHeaderNames.SERVER;
    transportHeaders.set(headerName.toString(), fieldValue.toString());
}
// Copies response-only fields (status code, reason phrase) from the struct onto the
// outbound transport message; requests carry no such fields and are skipped.
private static void setPropertiesToTransportMessage(HttpCarbonMessage outboundResponseMsg,
                                                    ObjectValue messageObj) {
    if (!isResponse(messageObj)) {
        return;
    }
    long statusCode = (Long) messageObj.get(RESPONSE_STATUS_CODE_FIELD);
    if (statusCode != 0) {
        outboundResponseMsg.setHttpStatusCode(getIntValue(statusCode));
    }
    Object reasonPhrase = messageObj.get(RESPONSE_REASON_PHRASE_FIELD);
    if (reasonPhrase != null && !reasonPhrase.toString().isEmpty()) {
        outboundResponseMsg.setProperty(HttpConstants.HTTP_REASON_PHRASE, reasonPhrase.toString());
    }
}
/**
 * Check the existence of entity. Set new entity if not present.
 *
 * @param value request/response struct.
 */
public static void checkEntityAvailability(ObjectValue value) {
    Object entity = value.get(isRequest(value) ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
    if (entity == null) {
        createNewEntity(value);
    }
}
/**
 * Check the existence of content-length and transfer-encoding headers.
 *
 * @param message transport message
 * @return true if either header is available, else false.
 */
public static Boolean checkRequestBodySizeHeadersAvailability(HttpCarbonMessage message) {
    if (message.getHeader(HttpHeaderNames.CONTENT_LENGTH.toString()) != null) {
        return true;
    }
    return message.getHeader(HttpHeaderNames.TRANSFER_ENCODING.toString()) != null;
}
/**
 * Check the existence of the message entity data source.
 *
 * @param value request/response object.
 * @return true if the message entity data source is available, else false.
 */
public static boolean isEntityDataSourceAvailable(ObjectValue value) {
    Object entity = value.get(isRequest(value) ? REQUEST_ENTITY_FIELD : RESPONSE_ENTITY_FIELD);
    if (entity == null) {
        return false;
    }
    return EntityBodyHandler.getMessageDataSource((ObjectValue) entity) != null;
}
// Applies the service's compression configuration to the outbound response by setting
// Content-Encoding. Precedence: an already-present Content-Encoding wins; NEVER forces
// identity; otherwise the content-type filter and the client's Accept-Encoding decide.
private static void setCompressionHeaders(MapValue<String, Object> compressionConfig, HttpCarbonMessage requestMsg,
HttpCarbonMessage outboundResponseMsg) {
if (!checkConfigAnnotationAvailability(compressionConfig)) {
return;
}
// Respect any Content-Encoding that was set explicitly upstream.
String contentEncoding = outboundResponseMsg.getHeaders().get(HttpHeaderNames.CONTENT_ENCODING);
if (contentEncoding != null) {
return;
}
CompressionConfigState compressionState = getCompressionState(
compressionConfig.getStringValue(ANN_CONFIG_ATTR_COMPRESSION_ENABLE));
if (compressionState == CompressionConfigState.NEVER) {
outboundResponseMsg.getHeaders().set(HttpHeaderNames.CONTENT_ENCODING, HTTP_TRANSFER_ENCODING_IDENTITY);
return;
}
String acceptEncodingValue = requestMsg.getHeaders().get(HttpHeaderNames.ACCEPT_ENCODING);
List<String> contentTypesAnnotationValues = getAsStringList(
compressionConfig.getArrayValue(ANN_CONFIG_ATTR_COMPRESSION_CONTENT_TYPES).getStringArray());
String contentType = outboundResponseMsg.getHeader(HttpHeaderNames.CONTENT_TYPE.toString());
// An empty content-type filter means every content type is eligible for compression.
if (contentTypesAnnotationValues.isEmpty() || isContentTypeMatched(contentTypesAnnotationValues, contentType)) {
if (compressionState == CompressionConfigState.ALWAYS &&
(acceptEncodingValue == null || HTTP_TRANSFER_ENCODING_IDENTITY.equals(acceptEncodingValue))) {
// ALWAYS forces gzip even when the client did not ask for (or opted out of) compression.
outboundResponseMsg.getHeaders().set(HttpHeaderNames.CONTENT_ENCODING, ENCODING_GZIP);
}
} else {
// Content type excluded by the filter: explicitly mark the body as uncompressed.
outboundResponseMsg.getHeaders().set(HttpHeaderNames.CONTENT_ENCODING, HTTP_TRANSFER_ENCODING_IDENTITY);
}
}
// Translates the annotation string into the compression state enum; unknown values yield null.
public static CompressionConfigState getCompressionState(String compressionState) {
    if (compressionState.equals(AUTO)) {
        return CompressionConfigState.AUTO;
    }
    if (compressionState.equals(ALWAYS)) {
        return CompressionConfigState.ALWAYS;
    }
    if (compressionState.equals(NEVER)) {
        return CompressionConfigState.NEVER;
    }
    return null;
}
// True when the (lower-cased) content type contains any of the configured type fragments.
private static boolean isContentTypeMatched(List<String> contentTypes, String contentType) {
    if (contentType == null) {
        return false;
    }
    String lowerCased = contentType.toLowerCase();
    for (String candidate : contentTypes) {
        if (lowerCased.contains(candidate)) {
            return true;
        }
    }
    return false;
}
// Converts the given values to a list of trimmed, lower-cased strings; a null
// array yields an empty list.
private static List<String> getAsStringList(Object[] values) {
    if (values == null) {
        return new ArrayList<>();
    }
    List<String> normalized = new ArrayList<>(values.length);
    for (Object value : values) {
        normalized.add(value.toString().trim().toLowerCase());
    }
    return normalized;
}
/**
 * Builds the listener interface identifier in {@code host:port} form, substituting the
 * wildcard address when no host is configured.
 */
public static String getListenerInterface(String host, int port) {
    String resolvedHost = (host == null) ? "0.0.0.0" : host;
    return resolvedHost + ":" + port;
}
/**
 * Resolves the chunking annotation string to a {@link ChunkConfig} constant.
 *
 * @throws BallerinaConnectorException for unrecognized values
 */
public static ChunkConfig getChunkConfig(String chunkConfig) {
    if (chunkConfig.equals(HttpConstants.AUTO)) {
        return ChunkConfig.AUTO;
    }
    if (chunkConfig.equals(HttpConstants.ALWAYS)) {
        return ChunkConfig.ALWAYS;
    }
    if (chunkConfig.equals(NEVER)) {
        return ChunkConfig.NEVER;
    }
    throw new BallerinaConnectorException(
            "Invalid configuration found for Transfer-Encoding: " + chunkConfig);
}
/**
 * Resolves the keep-alive annotation string to a {@link KeepAliveConfig} constant.
 *
 * @throws BallerinaConnectorException for unrecognized values
 */
public static KeepAliveConfig getKeepAliveConfig(String keepAliveConfig) {
    if (keepAliveConfig.equals(HttpConstants.AUTO)) {
        return KeepAliveConfig.AUTO;
    }
    if (keepAliveConfig.equals(HttpConstants.ALWAYS)) {
        return KeepAliveConfig.ALWAYS;
    }
    if (keepAliveConfig.equals(NEVER)) {
        return KeepAliveConfig.NEVER;
    }
    throw new BallerinaConnectorException(
            "Invalid configuration found for Keep-Alive: " + keepAliveConfig);
}
/**
 * Resolves the forwarded-extension annotation value (case-insensitive) to its enum constant.
 *
 * @throws BallerinaConnectorException for unrecognized values
 */
public static ForwardedExtensionConfig getForwardedExtensionConfig(String forwarded) {
    if (HttpConstants.FORWARDED_ENABLE.equalsIgnoreCase(forwarded)) {
        return ForwardedExtensionConfig.ENABLE;
    }
    if (HttpConstants.FORWARDED_TRANSITION.equalsIgnoreCase(forwarded)) {
        return ForwardedExtensionConfig.TRANSITION;
    }
    if (HttpConstants.FORWARDED_DISABLE.equalsIgnoreCase(forwarded)) {
        return ForwardedExtensionConfig.DISABLE;
    }
    throw new BallerinaConnectorException("Invalid configuration found for Forwarded : " + forwarded);
}
/**
 * Builds a completed skeleton carbon message: an HTTP/1.1 GET request with an empty URI,
 * or an HTTP/1.1 200 OK response.
 */
public static HttpCarbonMessage createHttpCarbonMessage(boolean isRequest) {
    HttpCarbonMessage message;
    if (isRequest) {
        message = new HttpCarbonMessage(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, ""));
    } else {
        message = new HttpCarbonMessage(new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK));
    }
    message.completeMessage();
    return message;
}
/**
 * Validates that a resource function invocation is legal: the connection carries a live
 * inbound message and the response method has not already been invoked on it.
 */
public static void checkFunctionValidity(ObjectValue connectionObj, HttpCarbonMessage reqMsg,
                                         HttpCarbonMessage outboundResponseMsg) {
    serverConnectionStructCheck(reqMsg);
    int statusCode = outboundResponseMsg.getHttpStatusCode();
    methodInvocationCheck(connectionObj, reqMsg, statusCode);
}
// Rejects a second invocation on the same connection (or a missing request message).
// 100-continue responses do not mark the connection as used, so the real response can
// still follow.
private static void methodInvocationCheck(ObjectValue connectionObj, HttpCarbonMessage reqMsg, int statusCode) {
    if (connectionObj.getNativeData(METHOD_ACCESSED) != null || reqMsg == null) {
        throw new IllegalStateException("illegal function invocation");
    }
    if (!is100ContinueRequest(reqMsg, statusCode)) {
        connectionObj.addNativeData(METHOD_ACCESSED, true);
    }
}
/**
 * Ensures the Connection value is backed by a real inbound request message.
 *
 * @throws ErrorValue (generic listener error) when the request message is null
 */
public static void serverConnectionStructCheck(HttpCarbonMessage reqMsg) {
    if (reqMsg == null) {
        throw createHttpError("operation not allowed:invalid Connection variable",
                HttpErrorType.GENERIC_LISTENER_ERROR);
    }
}
// True when the request carries "Expect: 100-continue" or the response status is 100.
private static boolean is100ContinueRequest(HttpCarbonMessage reqMsg, int statusCode) {
    String expectHeader = reqMsg.getHeader(HttpHeaderNames.EXPECT.toString());
    if (HttpConstants.HEADER_VAL_100_CONTINUE.equalsIgnoreCase(expectHeader)) {
        return true;
    }
    return statusCode == 100;
}
/**
 * Returns the transaction-participant configuration annotation attached to the given
 * resource, or null when the resource carries no such annotation.
 */
public static MapValue getTransactionConfigAnnotation(AttachedFunction resource, String transactionPackagePath) {
    return (MapValue) resource.getAnnotation(transactionPackagePath,
            TransactionConstants.ANN_NAME_TRX_PARTICIPANT_CONFIG);
}
// Narrows a long to an int, rejecting values that do not fit.
private static int getIntValue(long val) {
    int narrowed = (int) val;
    if (narrowed != val) {
        // Lossy narrowing: the configured value exceeds the int range.
        throw new IllegalArgumentException("invalid argument: " + val);
    }
    return narrowed;
}
/**
 * Returns the Content-Type header of the given transport message, or null when absent.
 */
public static String getContentTypeFromTransportMessage(HttpCarbonMessage transportMessage) {
    return transportMessage.getHeader(HttpHeaderNames.CONTENT_TYPE.toString());
}
/**
* If the given Content-Type header value doesn't have a boundary parameter value, get a new boundary string and
* append it to Content-Type and set it to transport message.
*
* @param transportMessage Represent transport message
* @param contentType Represent the Content-Type header value
* @return The boundary string that was extracted from header or the newly generated one
*/
public static String addBoundaryIfNotExist(HttpCarbonMessage transportMessage, String contentType) {
    String boundaryValue = HeaderUtil.extractBoundaryParameter(contentType);
    if (boundaryValue != null) {
        // A boundary is already present: strip surrounding quotes before reuse.
        boundaryValue = sanitizeBoundary(boundaryValue);
        boolean validateContentType = MimeUtil.isValidateContentType(contentType);
        if (!validateContentType) {
            // Rebuild the header with the sanitized, quoted boundary so downstream
            // parsers see a well-formed Content-Type.
            String headerValue = HeaderUtil.getHeaderValue(contentType);
            MapValue<String, String> paramMap = HeaderUtil.getParamMap(contentType);
            paramMap.put(BOUNDARY, MimeUtil.includeQuotes(boundaryValue));
            contentType = HeaderUtil.appendHeaderParams(new StringBuilder(headerValue).append(";"), paramMap);
            transportMessage.setHeader(String.valueOf(CONTENT_TYPE), contentType);
        }
        return boundaryValue;
    }
    // No boundary found: generate a fresh one and append it to the header.
    return HttpUtil.addBoundaryParameter(transportMessage, contentType);
}
/**
* Generate a new boundary string and append it Content-Type and set that to transport message.
*
* @param transportMessage Represent transport message
* @param contentType Represent the Content-Type header value
* @return The newly generated boundary string
*/
private static String addBoundaryParameter(HttpCarbonMessage transportMessage, String contentType) {
    String boundaryString = null;
    // Only multipart/* content types get a generated boundary; anything else returns null.
    if (contentType != null && contentType.startsWith(MULTIPART_AS_PRIMARY_TYPE)) {
        boundaryString = MimeUtil.getNewMultipartDelimiter();
        transportMessage.setHeader(HttpHeaderNames.CONTENT_TYPE.toString(), contentType + "; " + BOUNDARY + "=" +
                boundaryString);
    }
    return boundaryString;
}
/**
* Sanitize the boundary string by removing leading and trailing double quotes.
*
* @param boundaryString Represent boundary string
* @return Sanitized boundary string
*/
// Strips at most one leading and one trailing double quote from the boundary string,
// matching the original regex "^\"|\"$" replacement.
private static String sanitizeBoundary(String boundaryString) {
    String sanitized = boundaryString;
    if (sanitized.startsWith("\"")) {
        sanitized = sanitized.substring(1);
    }
    if (sanitized.endsWith("\"")) {
        sanitized = sanitized.substring(0, sanitized.length() - 1);
    }
    return sanitized;
}
/**
 * Creates a new default HTTP/WebSocket connector factory instance.
 */
public static HttpWsConnectorFactory createHttpWsConnectionFactory() {
    return new DefaultHttpWsConnectorFactory();
}
/**
 * If observability is active for the current strand, propagates trace context headers into
 * the outbound message and tags the observer context with request metadata.
 */
public static void checkAndObserveHttpRequest(Strand strand, HttpCarbonMessage message) {
    Optional<ObserverContext> observerContext = ObserveUtils.getObserverContextOfCurrentFrame(strand);
    observerContext.ifPresent(ctx -> {
        // Propagate tracing context to the downstream service via headers.
        HttpUtil.injectHeaders(message, ObserveUtils.getContextProperties(strand.observerContext));
        strand.observerContext.addTag(TAG_KEY_HTTP_METHOD, message.getHttpMethod());
        // URL tagging can be disabled by configuration (e.g. to limit cardinality).
        if (!ConfigRegistry.getInstance().getAsBoolean(CONFIG_CLIENT_HTTP_URL_DISABLED)) {
            strand.observerContext.addTag(TAG_KEY_HTTP_URL, String.valueOf(message.getProperty(HttpConstants.TO)));
        }
        strand.observerContext.addTag(TAG_KEY_PEER_ADDRESS,
                message.getProperty(PROPERTY_HTTP_HOST) + ":" + message.getProperty(PROPERTY_HTTP_PORT));
        // Placeholder status code; presumably updated when the response arrives — TODO confirm.
        strand.observerContext.addTag(TAG_KEY_HTTP_STATUS_CODE, Integer.toString(0));
    });
}
/**
 * Copies every entry of the given map onto the message as a header; a null map is a no-op.
 */
public static void injectHeaders(HttpCarbonMessage msg, Map<String, String> headers) {
    if (headers == null) {
        return;
    }
    for (Map.Entry<String, String> header : headers.entrySet()) {
        msg.setHeader(header.getKey(), String.valueOf(header.getValue()));
    }
}
// Records the resolved chunking configuration on the outbound response; absent
// transfer-encoding preference leaves the message untouched.
private static void setChunkingHeader(String transferValue, HttpCarbonMessage outboundResponseMsg) {
    if (transferValue != null) {
        outboundResponseMsg.setProperty(CHUNKING_CONFIG, getChunkConfig(transferValue));
    }
}
/**
* Creates InResponse using the native {@code HttpCarbonMessage}.
*
* @param httpCarbonMessage the HttpCarbonMessage
* @return the Response struct
*/
public static ObjectValue createResponseStruct(HttpCarbonMessage httpCarbonMessage) {
    // Build fresh Ballerina Response and Entity objects, then bind the transport
    // message's data onto them.
    ObjectValue responseObj = ValueCreatorUtils.createResponseObject();
    ObjectValue entity = ValueCreatorUtils.createEntityObject();
    HttpUtil.populateInboundResponse(responseObj, entity, httpCarbonMessage);
    return responseObj;
}
/**
 * Populates the transport sender configuration from the client endpoint config:
 * SSL/secure-socket, HTTP/1.1 proxy, socket timeout, HTTP version and Forwarded extension.
 *
 * @param senderConfiguration  transport sender configuration to mutate
 * @param clientEndpointConfig Ballerina client endpoint configuration record
 * @param scheme               target scheme ("http" or "https")
 */
public static void populateSenderConfigurations(SenderConfiguration senderConfiguration,
                                                MapValue<String, Object> clientEndpointConfig, String scheme) {
    ProxyServerConfiguration proxyServerConfiguration;
    MapValue secureSocket = clientEndpointConfig.getMapValue(HttpConstants.ENDPOINT_CONFIG_SECURE_SOCKET);
    String httpVersion = clientEndpointConfig.getStringValue(HttpConstants.CLIENT_EP_HTTP_VERSION);
    if (secureSocket != null) {
        HttpUtil.populateSSLConfiguration(senderConfiguration, secureSocket);
    } else if (scheme.equals(PROTOCOL_HTTPS)) {
        // https without a secureSocket record: HTTP/2 requires explicit SSL config,
        // HTTP/1.1 falls back to the JDK's default trust material.
        if (httpVersion.equals(HTTP_2_0_VERSION)) {
            throw createHttpError("To enable https you need to configure secureSocket record",
                    HttpErrorType.SSL_ERROR);
        } else {
            senderConfiguration.useJavaDefaults();
        }
    }
    // Proxy settings are only honored for HTTP/1.1 clients.
    if (HTTP_1_1_VERSION.equals(httpVersion)) {
        MapValue<String, Object> http1Settings = (MapValue<String, Object>) clientEndpointConfig
                .get(HttpConstants.HTTP1_SETTINGS);
        MapValue proxy = http1Settings.getMapValue(HttpConstants.PROXY_STRUCT_REFERENCE);
        if (proxy != null) {
            String proxyHost = proxy.getStringValue(HttpConstants.PROXY_HOST);
            int proxyPort = proxy.getIntValue(HttpConstants.PROXY_PORT).intValue();
            String proxyUserName = proxy.getStringValue(HttpConstants.PROXY_USERNAME);
            String proxyPassword = proxy.getStringValue(HttpConstants.PROXY_PASSWORD);
            try {
                proxyServerConfiguration = new ProxyServerConfiguration(proxyHost, proxyPort);
            } catch (UnknownHostException e) {
                throw new BallerinaConnectorException("Failed to resolve host" + proxyHost, e);
            }
            if (!proxyUserName.isEmpty()) {
                proxyServerConfiguration.setProxyUsername(proxyUserName);
            }
            if (!proxyPassword.isEmpty()) {
                proxyServerConfiguration.setProxyPassword(proxyPassword);
            }
            senderConfiguration.setProxyServerConfiguration(proxyServerConfiguration);
        }
    }
    long timeoutMillis = clientEndpointConfig.getIntValue(HttpConstants.CLIENT_EP_ENDPOINT_TIMEOUT);
    // A negative timeout means "no idle timeout"; 0 disables it at the transport level.
    if (timeoutMillis < 0) {
        senderConfiguration.setSocketIdleTimeout(0);
    } else {
        senderConfiguration.setSocketIdleTimeout(
                validateConfig(timeoutMillis, HttpConstants.CLIENT_EP_ENDPOINT_TIMEOUT));
    }
    if (httpVersion != null) {
        senderConfiguration.setHttpVersion(httpVersion);
    }
    String forwardedExtension = clientEndpointConfig.getStringValue(HttpConstants.CLIENT_EP_FORWARDED);
    senderConfiguration.setForwardedExtensionConfig(HttpUtil.getForwardedExtensionConfig(forwardedExtension));
}
/**
 * Returns the connection manager cached on the given pool record, creating it lazily.
 * Uses check-then-lock-then-recheck on the record's native data so only one manager is
 * created per pool record under concurrent access.
 */
public static ConnectionManager getConnectionManager(MapValue<String, Long> poolStruct) {
    ConnectionManager poolManager = (ConnectionManager) poolStruct.getNativeData(CONNECTION_MANAGER);
    if (poolManager == null) {
        synchronized (poolStruct) {
            // Re-check under the lock: another thread may have created it meanwhile.
            if (poolStruct.getNativeData(CONNECTION_MANAGER) == null) {
                PoolConfiguration userDefinedPool = new PoolConfiguration();
                populatePoolingConfig(poolStruct, userDefinedPool);
                poolManager = new ConnectionManager(userDefinedPool);
                poolStruct.addNativeData(CONNECTION_MANAGER, poolManager);
            }
        }
    }
    return poolManager;
}
/**
 * Copies pooling values from the Ballerina pool record into the transport pool configuration.
 */
public static void populatePoolingConfig(MapValue<String, Long> poolRecord, PoolConfiguration poolConfiguration) {
    long maxActiveConnections = poolRecord.get(HttpConstants.CONNECTION_POOLING_MAX_ACTIVE_CONNECTIONS);
    poolConfiguration.setMaxActivePerPool(
            validateConfig(maxActiveConnections, HttpConstants.CONNECTION_POOLING_MAX_ACTIVE_CONNECTIONS));
    long maxIdleConnections = poolRecord.get(HttpConstants.CONNECTION_POOLING_MAX_IDLE_CONNECTIONS);
    poolConfiguration.setMaxIdlePerPool(
            validateConfig(maxIdleConnections, HttpConstants.CONNECTION_POOLING_MAX_IDLE_CONNECTIONS));
    long waitTime = poolRecord.get(HttpConstants.CONNECTION_POOLING_WAIT_TIME);
    poolConfiguration.setMaxWaitTime(waitTime);
    long maxActiveStreamsPerConnection = poolRecord.get(CONNECTION_POOLING_MAX_ACTIVE_STREAMS_PER_CONNECTION);
    // -1 is the sentinel for "unlimited" HTTP/2 streams per connection.
    poolConfiguration.setHttp2MaxActiveStreamsPerConnection(
            maxActiveStreamsPerConnection == -1 ? Integer.MAX_VALUE : validateConfig(maxActiveStreamsPerConnection,
                    CONNECTION_POOLING_MAX_ACTIVE_STREAMS_PER_CONNECTION));
}
/**
 * Narrows a configured long value to an int. Values that overflow the int range are
 * clamped to {@link Integer#MAX_VALUE} with a warning instead of failing.
 *
 * @param value      configured value
 * @param configName configuration key, used in the warning message
 * @return the value as an int, or Integer.MAX_VALUE on overflow
 */
private static int validateConfig(long value, String configName) {
    try {
        return Math.toIntExact(value);
    } catch (ArithmeticException e) {
        // The original message had two '{}' placeholders but only one argument and a
        // missing space around the config name; log the key and both values properly.
        log.warn("The value set for the configuration {} needs to be less than {}. The value is set to {}",
                configName, Integer.MAX_VALUE, Integer.MAX_VALUE);
        return Integer.MAX_VALUE;
    }
}
/**
* Populates SSL configuration instance with secure socket configuration.
*
* @param sslConfiguration ssl configuration instance.
* @param secureSocket secure socket configuration.
*/
public static void populateSSLConfiguration(SslConfiguration sslConfiguration, MapValue secureSocket) {
    MapValue trustStore = secureSocket.getMapValue(ENDPOINT_CONFIG_TRUST_STORE);
    MapValue keyStore = secureSocket.getMapValue(ENDPOINT_CONFIG_KEY_STORE);
    MapValue protocols = secureSocket.getMapValue(ENDPOINT_CONFIG_PROTOCOLS);
    MapValue validateCert = secureSocket.getMapValue(ENDPOINT_CONFIG_VALIDATE_CERT);
    String keyFile = secureSocket.getStringValue(ENDPOINT_CONFIG_KEY);
    String certFile = secureSocket.getStringValue(ENDPOINT_CONFIG_CERTIFICATE);
    String trustCerts = secureSocket.getStringValue(ENDPOINT_CONFIG_TRUST_CERTIFICATES);
    String keyPassword = secureSocket.getStringValue(ENDPOINT_CONFIG_KEY_PASSWORD);
    boolean disableSslValidation = secureSocket.getBooleanValue(ENDPOINT_CONFIG_DISABLE_SSL);
    List<Parameter> clientParams = new ArrayList<>();
    // Short-circuit cases: validation disabled entirely, or no trust material given
    // (fall back to the JDK default trust store).
    if (disableSslValidation) {
        sslConfiguration.disableSsl();
        return;
    } else if (StringUtils.isEmpty(trustCerts) && trustStore == null) {
        sslConfiguration.useJavaDefaults();
        return;
    }
    // Trust material: exactly one of trustStore / trustCerts may be used.
    if (trustStore != null && StringUtils.isNotBlank(trustCerts)) {
        throw createHttpError("Cannot configure both trustStore and trustCerts at the same time.",
                HttpErrorType.SSL_ERROR);
    }
    if (trustStore != null) {
        String trustStoreFile = trustStore.getStringValue(FILE_PATH);
        if (StringUtils.isNotBlank(trustStoreFile)) {
            sslConfiguration.setTrustStoreFile(trustStoreFile);
        }
        String trustStorePassword = trustStore.getStringValue(PASSWORD);
        if (StringUtils.isNotBlank(trustStorePassword)) {
            sslConfiguration.setTrustStorePass(trustStorePassword);
        }
    } else if (StringUtils.isNotBlank(trustCerts)) {
        sslConfiguration.setClientTrustCertificates(trustCerts);
    }
    // Key material: exactly one of keyStore / keyFile; keyFile also requires certFile.
    if (keyStore != null && StringUtils.isNotBlank(keyFile)) {
        throw createHttpError("Cannot configure both keyStore and keyFile.", HttpErrorType.SSL_ERROR);
    } else if (StringUtils.isNotBlank(keyFile) && StringUtils.isBlank(certFile)) {
        throw createHttpError("Need to configure certFile containing client ssl certificates.",
                HttpErrorType.SSL_ERROR);
    }
    if (keyStore != null) {
        String keyStoreFile = keyStore.getStringValue(FILE_PATH);
        if (StringUtils.isNotBlank(keyStoreFile)) {
            sslConfiguration.setKeyStoreFile(keyStoreFile);
        }
        String keyStorePassword = keyStore.getStringValue(PASSWORD);
        if (StringUtils.isNotBlank(keyStorePassword)) {
            sslConfiguration.setKeyStorePass(keyStorePassword);
        }
    } else if (StringUtils.isNotBlank(keyFile)) {
        sslConfiguration.setClientKeyFile(keyFile);
        sslConfiguration.setClientCertificates(certFile);
        if (StringUtils.isNotBlank(keyPassword)) {
            sslConfiguration.setClientKeyPassword(keyPassword);
        }
    }
    // Protocol versions: enabled protocol list becomes a transport parameter, the
    // overall SSL protocol is set directly.
    if (protocols != null) {
        List<String> sslEnabledProtocolsValueList = Arrays.asList(
                protocols.getArrayValue(ENABLED_PROTOCOLS).getStringArray());
        if (!sslEnabledProtocolsValueList.isEmpty()) {
            String sslEnabledProtocols = sslEnabledProtocolsValueList.stream()
                    .collect(Collectors.joining(",", "", ""));
            Parameter clientProtocols = new Parameter(ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS, sslEnabledProtocols);
            clientParams.add(clientProtocols);
        }
        String sslProtocol = protocols.getStringValue(SSL_PROTOCOL_VERSION);
        if (StringUtils.isNotBlank(sslProtocol)) {
            sslConfiguration.setSSLProtocol(sslProtocol);
        }
    }
    // Certificate revocation validation with optional cache tuning (0 keeps defaults).
    if (validateCert != null) {
        boolean validateCertEnabled = validateCert.getBooleanValue(HttpConstants.ENABLE);
        int cacheSize = validateCert.getIntValue(HttpConstants.SSL_CONFIG_CACHE_SIZE).intValue();
        int cacheValidityPeriod = validateCert.getIntValue(HttpConstants.SSL_CONFIG_CACHE_VALIDITY_PERIOD)
                .intValue();
        sslConfiguration.setValidateCertEnabled(validateCertEnabled);
        if (cacheValidityPeriod != 0) {
            sslConfiguration.setCacheValidityPeriod(cacheValidityPeriod);
        }
        if (cacheSize != 0) {
            sslConfiguration.setCacheSize(cacheSize);
        }
    }
    boolean hostNameVerificationEnabled = secureSocket
            .getBooleanValue(HttpConstants.SSL_CONFIG_HOST_NAME_VERIFICATION_ENABLED);
    boolean ocspStaplingEnabled = secureSocket.getBooleanValue(HttpConstants.ENDPOINT_CONFIG_OCSP_STAPLING);
    sslConfiguration.setOcspStaplingEnabled(ocspStaplingEnabled);
    sslConfiguration.setHostNameVerificationEnabled(hostNameVerificationEnabled);
    sslConfiguration
            .setSslSessionTimeOut((int) secureSocket.getDefaultableIntValue(ENDPOINT_CONFIG_SESSION_TIMEOUT));
    sslConfiguration.setSslHandshakeTimeOut(secureSocket.getDefaultableIntValue(ENDPOINT_CONFIG_HANDSHAKE_TIMEOUT));
    // Cipher suites are passed through as a comma-joined transport parameter.
    Object[] cipherConfigs = secureSocket.getArrayValue(HttpConstants.SSL_CONFIG_CIPHERS).getStringArray();
    if (cipherConfigs != null) {
        List<Object> ciphersValueList = Arrays.asList(cipherConfigs);
        if (ciphersValueList.size() > 0) {
            String ciphers = ciphersValueList.stream().map(Object::toString)
                    .collect(Collectors.joining(",", "", ""));
            Parameter clientCiphers = new Parameter(HttpConstants.CIPHERS, ciphers);
            clientParams.add(clientCiphers);
        }
    }
    String enableSessionCreation = String.valueOf(secureSocket
            .getBooleanValue(HttpConstants.SSL_CONFIG_ENABLE_SESSION_CREATION));
    Parameter clientEnableSessionCreation = new Parameter(HttpConstants.SSL_CONFIG_ENABLE_SESSION_CREATION,
            enableSessionCreation);
    clientParams.add(clientEnableSessionCreation);
    if (!clientParams.isEmpty()) {
        sslConfiguration.setParameters(clientParams);
    }
}
/**
 * Normalizes a resource base path: trims whitespace, guarantees a leading "/",
 * and drops a single trailing "/" (unless the path is just "/") and a trailing "*".
 */
public static String sanitizeBasePath(String basePath) {
    String sanitized = basePath.trim();
    if (!sanitized.startsWith(HttpConstants.DEFAULT_BASE_PATH)) {
        sanitized = HttpConstants.DEFAULT_BASE_PATH.concat(sanitized);
    }
    if (sanitized.endsWith(HttpConstants.DEFAULT_BASE_PATH) && sanitized.length() != 1) {
        sanitized = sanitized.substring(0, sanitized.length() - 1);
    }
    if (sanitized.endsWith("*")) {
        sanitized = sanitized.substring(0, sanitized.length() - 1);
    }
    return sanitized;
}
/**
* Serialize outbound message.
*
* @param outboundMessageSource Represent the outbound message datasource
* @param entity Represent the entity of the outbound message
* @param messageOutputStream Represent the output stream
* @throws IOException In case an error occurs while writing to output stream
*/
public static void serializeDataSource(Object outboundMessageSource, ObjectValue entity,
                                       OutputStream messageOutputStream) throws IOException {
    // When the entity's content type calls for JSON, stream the datasource through the
    // JSON generator; otherwise fall back to type-based serialization.
    if (MimeUtil.generateAsJSON(outboundMessageSource, entity)) {
        JSONGenerator gen = new JSONGenerator(messageOutputStream);
        gen.serialize(outboundMessageSource);
        gen.flush();
    } else {
        serialize(outboundMessageSource, messageOutputStream);
    }
}
/**
 * Writes the given Ballerina value to the output stream, dispatching on its runtime type.
 * The instanceof order matters: StreamingJsonValue is a subtype of ArrayValue and must be
 * handled inside that branch.
 *
 * @throws IOException  when the underlying stream write fails
 * @throws ErrorValue   when the value is null
 */
public static void serialize(Object value, OutputStream outputStream) throws IOException {
    if (value == null) {
        throw createHttpError("error occurred while serializing null data");
    } else if (value instanceof ArrayValue) {
        if (value instanceof StreamingJsonValue) {
            ((StreamingJsonValue) value).serialize(outputStream);
        } else {
            ((ArrayValue) value).serialize(outputStream);
        }
    } else if (value instanceof MultipartDataSource) {
        ((MultipartDataSource) value).serialize(outputStream);
    } else if (value instanceof XMLItem) {
        ((XMLItem) value).serialize(outputStream);
    } else if (value instanceof XMLSequence) {
        ((XMLSequence) value).serialize(outputStream);
    } else if (value instanceof Long || value instanceof String ||
            value instanceof Double || value instanceof Integer || value instanceof Boolean) {
        // Simple values are written as their string form in the platform default charset.
        outputStream.write(value.toString().getBytes(Charset.defaultCharset()));
    } else {
        // Any other Ballerina reference value knows how to serialize itself.
        ((RefValue) value).serialize(outputStream);
    }
}
/**
* Check the availability of an annotation.
*
* @param configAnnotation Represent the annotation
* @return True if the annotation and the annotation value are available
*/
public static boolean checkConfigAnnotationAvailability(MapValue configAnnotation) {
    // A non-null map is treated as "annotation present".
    return configAnnotation != null;
}
/**
* Returns Listener configuration instance populated with endpoint config.
*
* @param port listener port.
* @param endpointConfig listener endpoint configuration.
* @return transport listener configuration instance.
*/
public static ListenerConfiguration getListenerConfig(long port, MapValue endpointConfig) {
    String host = endpointConfig.getStringValue(HttpConstants.ENDPOINT_CONFIG_HOST);
    MapValue sslConfig = endpointConfig.getMapValue(HttpConstants.ENDPOINT_CONFIG_SECURE_SOCKET);
    String httpVersion = endpointConfig.getStringValue(HttpConstants.ENDPOINT_CONFIG_VERSION);
    long idleTimeout = endpointConfig.getIntValue(HttpConstants.ENDPOINT_CONFIG_TIMEOUT);
    ListenerConfiguration listenerConfiguration = new ListenerConfiguration();
    // HTTP/1.1-only settings: pipelining limit and keep-alive behavior.
    if (HTTP_1_1_VERSION.equals(httpVersion)) {
        MapValue<String, Object> http1Settings =
                (MapValue<String, Object>) endpointConfig.get(HttpConstants.HTTP1_SETTINGS);
        listenerConfiguration.setPipeliningLimit(http1Settings.getIntValue(HttpConstants.PIPELINING_REQUEST_LIMIT));
        String keepAlive = http1Settings.getStringValue(HttpConstants.ENDPOINT_CONFIG_KEEP_ALIVE);
        listenerConfiguration.setKeepAliveConfig(HttpUtil.getKeepAliveConfig(keepAlive));
    }
    // Inbound request size limits (URI length, header size, entity body size).
    MapValue<String, Object> requestLimits =
            (MapValue<String, Object>) endpointConfig.getMapValue(HttpConstants.REQUEST_LIMITS);
    setInboundMgsSizeValidationConfig(requestLimits.getIntValue(HttpConstants.MAX_URI_LENGTH),
            requestLimits.getIntValue(HttpConstants.MAX_HEADER_SIZE),
            requestLimits.getIntValue(HttpConstants.MAX_ENTITY_BODY_SIZE),
            listenerConfiguration.getMsgSizeValidationConfig());
    // Host falls back to the b7a.http.host config entry, then the compiled-in default.
    if (host == null || host.trim().isEmpty()) {
        listenerConfiguration.setHost(ConfigRegistry.getInstance().getConfigOrDefault("b7a.http.host",
                HttpConstants.HTTP_DEFAULT_HOST));
    } else {
        listenerConfiguration.setHost(host);
    }
    if (port == 0) {
        throw new BallerinaConnectorException("Listener port is not defined!");
    }
    listenerConfiguration.setPort(Math.toIntExact(port));
    if (idleTimeout < 0) {
        throw new BallerinaConnectorException("Idle timeout cannot be negative. If you want to disable the " +
                "timeout please use value 0");
    }
    listenerConfiguration.setSocketIdleTimeout(Math.toIntExact(idleTimeout));
    if (httpVersion != null) {
        listenerConfiguration.setVersion(httpVersion);
    }
    // Server header: a configured server name wins over the generated "ballerina/x.y" value.
    if (endpointConfig.getType().getName().equalsIgnoreCase(LISTENER_CONFIGURATION)) {
        String serverName = endpointConfig.getStringValue(SERVER_NAME);
        listenerConfiguration.setServerHeader(serverName != null ? serverName : getServerName());
    } else {
        listenerConfiguration.setServerHeader(getServerName());
    }
    // Note: the SSL branch returns early, skipping pipelining/WebSocket-compression setup below.
    if (sslConfig != null) {
        return setSslConfig(sslConfig, listenerConfiguration);
    }
    listenerConfiguration.setPipeliningEnabled(true);
    Object webSocketCompressionEnabled = endpointConfig.get(WebSocketConstants.COMPRESSION_ENABLED_CONFIG);
    if (webSocketCompressionEnabled != null) {
        listenerConfiguration.setWebSocketCompressionEnabled((Boolean) webSocketCompressionEnabled);
    }
    return listenerConfiguration;
}
/**
 * Copies inbound message size limits into the transport validation config.
 * maxEntityBodySize of -1 is the sentinel for "unlimited" and leaves the default;
 * all other negative values are rejected.
 */
public static void setInboundMgsSizeValidationConfig(long maxInitialLineLength, long maxHeaderSize,
                                                     long maxEntityBodySize,
                                                     InboundMsgSizeValidationConfig sizeValidationConfig) {
    if (maxInitialLineLength >= 0) {
        sizeValidationConfig.setMaxInitialLineLength(Math.toIntExact(maxInitialLineLength));
    } else {
        throw new BallerinaConnectorException(
                "Invalid configuration found for max initial line length : " + maxInitialLineLength);
    }
    if (maxHeaderSize >= 0) {
        sizeValidationConfig.setMaxHeaderSize(Math.toIntExact(maxHeaderSize));
    } else {
        throw new BallerinaConnectorException("Invalid configuration found for maxHeaderSize : " + maxHeaderSize);
    }
    if (maxEntityBodySize != -1) {
        if (maxEntityBodySize >= 0) {
            sizeValidationConfig.setMaxEntityBodySize(maxEntityBodySize);
        } else {
            // Values below -1 are invalid (only -1 means unlimited).
            throw new BallerinaConnectorException(
                    "Invalid configuration found for maxEntityBodySize : " + maxEntityBodySize);
        }
    }
}
// Builds the Server header value: "ballerina/<version>" when the runtime version
// system property is set, plain "ballerina" otherwise.
private static String getServerName() {
    String version = System.getProperty(BALLERINA_VERSION);
    return (version != null) ? "ballerina/" + version : "ballerina";
}
/**
 * Applies the listener's secureSocket record to the transport listener configuration:
 * key material (keystore or key/cert files), mutual-SSL trust material, protocol and
 * cipher parameters, certificate validation and OCSP stapling.
 *
 * @return the same listenerConfiguration instance, now configured for HTTPS
 */
private static ListenerConfiguration setSslConfig(MapValue sslConfig, ListenerConfiguration listenerConfiguration) {
    listenerConfiguration.setScheme(PROTOCOL_HTTPS);
    MapValue trustStore = sslConfig.getMapValue(ENDPOINT_CONFIG_TRUST_STORE);
    MapValue keyStore = sslConfig.getMapValue(ENDPOINT_CONFIG_KEY_STORE);
    MapValue protocols = sslConfig.getMapValue(ENDPOINT_CONFIG_PROTOCOLS);
    MapValue validateCert = sslConfig.getMapValue(ENDPOINT_CONFIG_VALIDATE_CERT);
    MapValue ocspStapling = sslConfig.getMapValue(ENDPOINT_CONFIG_OCSP_STAPLING);
    String keyFile = sslConfig.getStringValue(ENDPOINT_CONFIG_KEY);
    String certFile = sslConfig.getStringValue(ENDPOINT_CONFIG_CERTIFICATE);
    String trustCerts = sslConfig.getStringValue(ENDPOINT_CONFIG_TRUST_CERTIFICATES);
    String keyPassword = sslConfig.getStringValue(ENDPOINT_CONFIG_KEY_PASSWORD);
    // Key material: exactly one of keyStore / (keyFile + certFile) must be provided.
    if (keyStore != null && StringUtils.isNotBlank(keyFile)) {
        throw createHttpError("Cannot configure both keyStore and keyFile at the same time.",
                HttpErrorType.SSL_ERROR);
    } else if (keyStore == null && (StringUtils.isBlank(keyFile) || StringUtils.isBlank(certFile))) {
        throw createHttpError("Either keystore or certificateKey and server certificates must be provided "
                + "for secure connection", HttpErrorType.SSL_ERROR);
    }
    if (keyStore != null) {
        String keyStoreFile = keyStore.getStringValue(FILE_PATH);
        if (StringUtils.isBlank(keyStoreFile)) {
            throw createHttpError("Keystore file location must be provided for secure connection.",
                    HttpErrorType.SSL_ERROR);
        }
        String keyStorePassword = keyStore.getStringValue(PASSWORD);
        if (StringUtils.isBlank(keyStorePassword)) {
            throw createHttpError("Keystore password must be provided for secure connection",
                    HttpErrorType.SSL_ERROR);
        }
        listenerConfiguration.setKeyStoreFile(keyStoreFile);
        listenerConfiguration.setKeyStorePass(keyStorePassword);
    } else {
        listenerConfiguration.setServerKeyFile(keyFile);
        listenerConfiguration.setServerCertificates(certFile);
        if (StringUtils.isNotBlank(keyPassword)) {
            listenerConfiguration.setServerKeyPassword(keyPassword);
        }
    }
    String sslVerifyClient = sslConfig.getStringValue(SSL_CONFIG_SSL_VERIFY_CLIENT);
    listenerConfiguration.setVerifyClient(sslVerifyClient);
    listenerConfiguration
            .setSslSessionTimeOut((int) sslConfig.getDefaultableIntValue(ENDPOINT_CONFIG_SESSION_TIMEOUT));
    listenerConfiguration
            .setSslHandshakeTimeOut(sslConfig.getDefaultableIntValue(ENDPOINT_CONFIG_HANDSHAKE_TIMEOUT));
    // Mutual SSL requires trust material when client verification is requested.
    if (trustStore == null && StringUtils.isNotBlank(sslVerifyClient) && StringUtils.isBlank(trustCerts)) {
        throw createHttpError("Truststore location or trustCertificates must be provided to enable Mutual SSL",
                HttpErrorType.SSL_ERROR);
    }
    if (trustStore != null) {
        String trustStoreFile = trustStore.getStringValue(FILE_PATH);
        String trustStorePassword = trustStore.getStringValue(PASSWORD);
        if (StringUtils.isBlank(trustStoreFile) && StringUtils.isNotBlank(sslVerifyClient)) {
            throw createHttpError("Truststore location must be provided to enable Mutual SSL",
                    HttpErrorType.SSL_ERROR);
        }
        if (StringUtils.isBlank(trustStorePassword) && StringUtils.isNotBlank(sslVerifyClient)) {
            throw createHttpError("Truststore password value must be provided to enable Mutual SSL",
                    HttpErrorType.SSL_ERROR);
        }
        listenerConfiguration.setTrustStoreFile(trustStoreFile);
        listenerConfiguration.setTrustStorePass(trustStorePassword);
    } else if (StringUtils.isNotBlank(trustCerts)) {
        listenerConfiguration.setServerTrustCertificates(trustCerts);
    }
    // Protocol versions and cipher suites become comma-joined transport parameters.
    List<Parameter> serverParamList = new ArrayList<>();
    Parameter serverParameters;
    if (protocols != null) {
        List<String> sslEnabledProtocolsValueList = Arrays.asList(
                protocols.getArrayValue(ENABLED_PROTOCOLS).getStringArray());
        if (!sslEnabledProtocolsValueList.isEmpty()) {
            String sslEnabledProtocols = sslEnabledProtocolsValueList.stream()
                    .collect(Collectors.joining(",", "", ""));
            serverParameters = new Parameter(ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS, sslEnabledProtocols);
            serverParamList.add(serverParameters);
        }
        String sslProtocol = protocols.getStringValue(SSL_PROTOCOL_VERSION);
        if (StringUtils.isNotBlank(sslProtocol)) {
            listenerConfiguration.setSSLProtocol(sslProtocol);
        }
    }
    List<String> ciphersValueList = Arrays.asList(
            sslConfig.getArrayValue(HttpConstants.SSL_CONFIG_CIPHERS).getStringArray());
    if (!ciphersValueList.isEmpty()) {
        String ciphers = ciphersValueList.stream().collect(Collectors.joining(",", "", ""));
        serverParameters = new Parameter(HttpConstants.CIPHERS, ciphers);
        serverParamList.add(serverParameters);
    }
    // Certificate revocation validation; cache values of 0 keep transport defaults.
    if (validateCert != null) {
        boolean validateCertificateEnabled = validateCert.getBooleanValue(HttpConstants.ENABLE);
        long cacheSize = validateCert.getIntValue(HttpConstants.SSL_CONFIG_CACHE_SIZE);
        long cacheValidationPeriod = validateCert.getIntValue(HttpConstants.SSL_CONFIG_CACHE_VALIDITY_PERIOD);
        listenerConfiguration.setValidateCertEnabled(validateCertificateEnabled);
        if (validateCertificateEnabled) {
            if (cacheSize != 0) {
                listenerConfiguration.setCacheSize(Math.toIntExact(cacheSize));
            }
            if (cacheValidationPeriod != 0) {
                listenerConfiguration.setCacheValidityPeriod(Math.toIntExact(cacheValidationPeriod));
            }
        }
    }
    // OCSP stapling; note it also toggles validateCertEnabled, overriding the block above.
    if (ocspStapling != null) {
        boolean ocspStaplingEnabled = ocspStapling.getBooleanValue(HttpConstants.ENABLE);
        listenerConfiguration.setOcspStaplingEnabled(ocspStaplingEnabled);
        long cacheSize = ocspStapling.getIntValue(HttpConstants.SSL_CONFIG_CACHE_SIZE);
        long cacheValidationPeriod = ocspStapling.getIntValue(HttpConstants.SSL_CONFIG_CACHE_VALIDITY_PERIOD);
        listenerConfiguration.setValidateCertEnabled(ocspStaplingEnabled);
        if (ocspStaplingEnabled) {
            if (cacheSize != 0) {
                listenerConfiguration.setCacheSize(Math.toIntExact(cacheSize));
            }
            if (cacheValidationPeriod != 0) {
                listenerConfiguration.setCacheValidityPeriod(Math.toIntExact(cacheValidationPeriod));
            }
        }
    }
    listenerConfiguration.setTLSStoreType(PKCS_STORE_TYPE);
    String serverEnableSessionCreation = String
            .valueOf(sslConfig.getBooleanValue(SSL_CONFIG_ENABLE_SESSION_CREATION));
    Parameter enableSessionCreationParam = new Parameter(SSL_CONFIG_ENABLE_SESSION_CREATION,
            serverEnableSessionCreation);
    serverParamList.add(enableSessionCreationParam);
    if (!serverParamList.isEmpty()) {
        listenerConfiguration.setParameters(serverParamList);
    }
    listenerConfiguration
            .setId(HttpUtil.getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort()));
    return listenerConfiguration;
}
/**
 * Derives the user-visible service name from the generated service type name by stripping
 * the compiler-added {@code $$service$...} suffix.
 *
 * @param balService the Ballerina service object
 * @return the service name without the generated suffix, or the full type name when the
 *         suffix marker is absent
 */
public static String getServiceName(ObjectValue balService) {
    String serviceTypeName = balService.getType().getName();
    int serviceIndex = serviceTypeName.lastIndexOf("$$service$");
    if (serviceIndex < 0) {
        // No generated marker present: returning the full name avoids the
        // StringIndexOutOfBoundsException that substring(0, -1) would throw.
        return serviceTypeName;
    }
    return serviceTypeName.substring(0, serviceIndex);
}
/**
 * Creates a Ballerina error value for the HTTP module whose reason type is a finite type
 * containing only the given reason string.
 *
 * @param reason     the sole member of the error's finite reason type
 * @param errorName  name of the error type to construct
 * @param reasonType reason string attached to the created error
 * @param errorMsg   detail message of the created error
 */
public static ErrorValue createHttpError(String reason, String errorName, String reasonType, String errorMsg) {
    BType detailType = BValueCreator.createRecordValue(new BPackage(PACKAGE, MODULE), HTTP_ERROR_DETAIL_RECORD)
            .getType();
    int mask = TypeFlags.asMask(TypeFlags.ANYDATA, TypeFlags.PURETYPE);
    Set<Object> valueSpace = new HashSet<>();
    valueSpace.add(reason);
    return BallerinaErrors.createError(
            new BErrorType(errorName, new BPackage(PACKAGE, MODULE, HTTP_MODULE_VERSION),
                    new BFiniteType(REASON_RECORD, valueSpace, mask), detailType),
            reasonType, errorMsg);
}
// Utility class: private constructor prevents instantiation.
private HttpUtil() {
}
} |
By returning the existing inner map here, we would end up modifying the original instance.
NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development = new TreeMap<>(this.development);
NavigableMap<RevisionId, ApplicationVersion> revisions = development.computeIfAbsent(job, __ -> new TreeMap<>());
if ( ! revisions.isEmpty()) revisions.compute(revisions.lastKey(), (__, last) -> last.withoutPackage());
revisions.put(revision.id(), revision);
return new RevisionHistory(production, development);
} | NavigableMap<RevisionId, ApplicationVersion> revisions = development.computeIfAbsent(job, __ -> new TreeMap<>()); | public RevisionHistory with(ApplicationVersion revision, JobId job) {
NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development = new TreeMap<>(this.development);
NavigableMap<RevisionId, ApplicationVersion> revisions = development.computeIfAbsent(job, __ -> new TreeMap<>());
if ( ! revisions.isEmpty()) revisions.compute(revisions.lastKey(), (__, last) -> last.withoutPackage());
revisions.put(revision.id(), revision);
return new RevisionHistory(production, development);
} | class RevisionHistory {
private static final Comparator<JobId> comparator = Comparator.comparing(JobId::application).thenComparing(JobId::type);
private final NavigableMap<RevisionId, ApplicationVersion> production;
private final NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development;
private RevisionHistory(NavigableMap<RevisionId, ApplicationVersion> production,
NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development) {
this.production = production;
this.development = development;
}
public static RevisionHistory empty() {
return ofRevisions(List.of(), Map.of());
}
public static RevisionHistory ofRevisions(Collection<ApplicationVersion> productionRevisions,
Map<JobId, ? extends Collection<ApplicationVersion>> developmentRevisions) {
NavigableMap<RevisionId, ApplicationVersion> production = new TreeMap<>();
for (ApplicationVersion revision : productionRevisions)
production.put(revision.id(), revision);
NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development = new TreeMap<>(comparator);
developmentRevisions.forEach((job, jobRevisions) -> {
NavigableMap<RevisionId, ApplicationVersion> revisions = development.computeIfAbsent(job, __ -> new TreeMap<>());
for (ApplicationVersion revision : jobRevisions)
revisions.put(revision.id(), revision);
});
return new RevisionHistory(production, development);
}
/** Returns a copy of this with given production revision forgotten. */
public RevisionHistory without(RevisionId id) {
if ( ! production.containsKey(id)) return this;
TreeMap<RevisionId, ApplicationVersion> production = new TreeMap<>(this.production);
production.remove(id);
return new RevisionHistory(production, development);
}
/** Returns a copy of this with the given development revision forgotten. */
public RevisionHistory without(RevisionId id, JobId job) {
if ( ! development.containsKey(job) || ! development.get(job).containsKey(id)) return this;
NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development = new TreeMap<>(this.development);
development.get(job).remove(id);
return new RevisionHistory(production, development);
}
/** Returns a copy of this with the production revision added or updated */
public RevisionHistory with(ApplicationVersion revision) {
NavigableMap<RevisionId, ApplicationVersion> production = new TreeMap<>(this.production);
production.put(revision.id(), revision);
return new RevisionHistory(production, development);
}
/** Returns a copy of this with the new development revision added, and the previous version without a package. */
private static ApplicationVersion revisionOf(RevisionId id, boolean production) {
return new ApplicationVersion(Optional.empty(), OptionalLong.of(id.number()), Optional.empty(),
Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(),
! production, Optional.empty(), false, false);
}
/** Returns the production {@link ApplicationVersion} with this revision ID. */
public ApplicationVersion get(RevisionId id) {
return production.getOrDefault(id, revisionOf(id, true));
}
/** Returns the development {@link ApplicationVersion} for the give job, with this revision ID. */
public ApplicationVersion get(RevisionId id, JobId job) {
return development.getOrDefault(job, Collections.emptyNavigableMap())
.getOrDefault(id, revisionOf(id, false));
}
/** Returns the last submitted production build. */
public Optional<ApplicationVersion> last() {
return Optional.ofNullable(production.lastEntry()).map(Map.Entry::getValue);
}
/** Returns all known production revisions we still have the package for, from oldest to newest. */
public List<ApplicationVersion> withPackage() {
return production.values().stream()
.filter(ApplicationVersion::hasPackage)
.collect(toList());
}
/** Returns the currently deployable revisions of the application. */
public Deque<ApplicationVersion> deployable(boolean ascending) {
Deque<ApplicationVersion> versions = new ArrayDeque<>();
String previousHash = "";
for (ApplicationVersion version : withPackage()) {
if (version.isDeployable() && (version.bundleHash().isEmpty() || ! previousHash.equals(version.bundleHash().get()))) {
if (ascending) versions.addLast(version);
else versions.addFirst(version);
}
previousHash = version.bundleHash().orElse("");
}
return versions;
}
/** All known production revisions, in ascending order. */
public List<ApplicationVersion> production() {
return List.copyOf(production.values());
}
/* All known development revisions, in ascending order, per job. */
public NavigableMap<JobId, List<ApplicationVersion>> development() {
NavigableMap<JobId, List<ApplicationVersion>> copy = new TreeMap<>(comparator);
development.forEach((job, revisions) -> copy.put(job, List.copyOf(revisions.values())));
return Collections.unmodifiableNavigableMap(copy);
}
} | class RevisionHistory {
private static final Comparator<JobId> comparator = Comparator.comparing(JobId::application).thenComparing(JobId::type);
private final NavigableMap<RevisionId, ApplicationVersion> production;
private final NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development;
private RevisionHistory(NavigableMap<RevisionId, ApplicationVersion> production,
NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development) {
this.production = production;
this.development = development;
}
public static RevisionHistory empty() {
return ofRevisions(List.of(), Map.of());
}
public static RevisionHistory ofRevisions(Collection<ApplicationVersion> productionRevisions,
Map<JobId, ? extends Collection<ApplicationVersion>> developmentRevisions) {
NavigableMap<RevisionId, ApplicationVersion> production = new TreeMap<>();
for (ApplicationVersion revision : productionRevisions)
production.put(revision.id(), revision);
NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development = new TreeMap<>(comparator);
developmentRevisions.forEach((job, jobRevisions) -> {
NavigableMap<RevisionId, ApplicationVersion> revisions = development.computeIfAbsent(job, __ -> new TreeMap<>());
for (ApplicationVersion revision : jobRevisions)
revisions.put(revision.id(), revision);
});
return new RevisionHistory(production, development);
}
/** Returns a copy of this without any production revisions older than the given. */
public RevisionHistory withoutOlderThan(RevisionId id) {
if (production.headMap(id).isEmpty()) return this;
return new RevisionHistory(production.tailMap(id, true), development);
}
/** Returns a copy of this without any development revisions older than the given. */
public RevisionHistory withoutOlderThan(RevisionId id, JobId job) {
if ( ! development.containsKey(job) || development.get(job).headMap(id).isEmpty()) return this;
NavigableMap<JobId, NavigableMap<RevisionId, ApplicationVersion>> development = new TreeMap<>(this.development);
development.compute(job, (__, revisions) -> revisions.tailMap(id, true));
return new RevisionHistory(production, development);
}
/** Returns a copy of this with the production revision added or updated */
public RevisionHistory with(ApplicationVersion revision) {
NavigableMap<RevisionId, ApplicationVersion> production = new TreeMap<>(this.production);
production.put(revision.id(), revision);
return new RevisionHistory(production, development);
}
/** Returns a copy of this with the new development revision added, and the previous version without a package. */
private static ApplicationVersion revisionOf(RevisionId id, boolean production) {
return new ApplicationVersion(Optional.empty(), OptionalLong.of(id.number()), Optional.empty(),
Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(),
! production, Optional.empty(), false, false);
}
/** Returns the production {@link ApplicationVersion} with this revision ID. */
public ApplicationVersion get(RevisionId id) {
return production.getOrDefault(id, revisionOf(id, true));
}
/** Returns the development {@link ApplicationVersion} for the give job, with this revision ID. */
public ApplicationVersion get(RevisionId id, JobId job) {
return development.getOrDefault(job, Collections.emptyNavigableMap())
.getOrDefault(id, revisionOf(id, false));
}
/** Returns the last submitted production build. */
public Optional<ApplicationVersion> last() {
return Optional.ofNullable(production.lastEntry()).map(Map.Entry::getValue);
}
/** Returns all known production revisions we still have the package for, from oldest to newest. */
public List<ApplicationVersion> withPackage() {
return production.values().stream()
.filter(ApplicationVersion::hasPackage)
.collect(toList());
}
/** Returns the currently deployable revisions of the application. */
public Deque<ApplicationVersion> deployable(boolean ascending) {
Deque<ApplicationVersion> versions = new ArrayDeque<>();
String previousHash = "";
for (ApplicationVersion version : withPackage()) {
if (version.isDeployable() && (version.bundleHash().isEmpty() || ! previousHash.equals(version.bundleHash().get()))) {
if (ascending) versions.addLast(version);
else versions.addFirst(version);
}
previousHash = version.bundleHash().orElse("");
}
return versions;
}
/** All known production revisions, in ascending order. */
public List<ApplicationVersion> production() {
return List.copyOf(production.values());
}
/* All known development revisions, in ascending order, per job. */
public NavigableMap<JobId, List<ApplicationVersion>> development() {
NavigableMap<JobId, List<ApplicationVersion>> copy = new TreeMap<>(comparator);
development.forEach((job, revisions) -> copy.put(job, List.copyOf(revisions.values())));
return Collections.unmodifiableNavigableMap(copy);
}
} |
Do we need this separate variable now? We can directly set to `errorType.detailType` at L961 and L963, right? | public BLangNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) {
BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
Optional<TypeParameterNode> typeParam = errorTypeDescriptorNode.errorTypeParamsNode();
errorType.pos = getPosition(errorTypeDescriptorNode);
if (typeParam.isPresent()) {
TypeParameterNode typeNode = typeParam.get();
BLangType detail;
if (isAnonymousTypeNode(typeNode)) {
detail = deSugarTypeAsUserDefType(createTypeNode(typeNode));
} else {
detail = createTypeNode(typeNode);
}
errorType.detailType = detail;
NonTerminalNode parent = errorTypeDescriptorNode.parent();
if (parent.kind() == SyntaxKind.DISTINCT_TYPE_DESC) {
parent = parent.parent();
}
if (parent.kind() != SyntaxKind.TYPE_DEFINITION) {
return deSugarTypeAsUserDefType(errorType);
}
}
return errorType;
} | BLangType detail; | public BLangNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) {
BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
Optional<TypeParameterNode> typeParam = errorTypeDescriptorNode.errorTypeParamsNode();
errorType.pos = getPosition(errorTypeDescriptorNode);
if (typeParam.isPresent()) {
TypeParameterNode typeNode = typeParam.get();
if (isAnonymousTypeNode(typeNode)) {
errorType.detailType = deSugarTypeAsUserDefType(createTypeNode(typeNode));
} else {
errorType.detailType = createTypeNode(typeNode);
}
NonTerminalNode parent = errorTypeDescriptorNode.parent();
if (parent.kind() == SyntaxKind.DISTINCT_TYPE_DESC) {
parent = parent.parent();
}
if (parent.kind() != SyntaxKind.TYPE_DEFINITION) {
return deSugarTypeAsUserDefType(errorType);
}
}
return errorType;
} | class BLangNodeTransformer extends NodeTransformer<BLangNode> {
private static final String IDENTIFIER_LITERAL_PREFIX = "'";
private BLangDiagnosticLog dlog;
private SymbolTable symTable;
private PackageCache packageCache;
private PackageID packageID;
private String currentCompUnitName;
private BLangCompilationUnit currentCompilationUnit;
private BLangAnonymousModelHelper anonymousModelHelper;
private BLangMissingNodesHelper missingNodesHelper;
/* To keep track of additional statements produced from multi-BLangNode resultant transformations */
private Stack<BLangStatement> additionalStatements = new Stack<>();
/* To keep track if we are inside a block statment for the use of type definition creation */
private boolean isInLocalContext = false;
public BLangNodeTransformer(CompilerContext context,
PackageID packageID, String entryName) {
this.dlog = BLangDiagnosticLog.getInstance(context);
this.dlog.setCurrentPackageId(packageID);
this.symTable = SymbolTable.getInstance(context);
this.packageID = packageID;
this.currentCompUnitName = entryName;
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
}
public List<org.ballerinalang.model.tree.Node> accept(Node node) {
BLangNode bLangNode = node.apply(this);
List<org.ballerinalang.model.tree.Node> nodes = new ArrayList<>();
while (!additionalStatements.empty()) {
nodes.add(additionalStatements.pop());
}
nodes.add(bLangNode);
return nodes;
}
@Override
public BLangNode transform(IdentifierToken identifierToken) {
return this.createIdentifier(getPosition(identifierToken), identifierToken);
}
private Optional<Node> getDocumentationString(Optional<MetadataNode> metadataNode) {
return metadataNode.map(MetadataNode::documentationString).orElse(null);
}
private NodeList<AnnotationNode> getAnnotations(Optional<MetadataNode> metadataNode) {
return metadataNode.map(MetadataNode::annotations).orElse(null);
}
private Location getPosition(Node node) {
if (node == null) {
return null;
}
LineRange lineRange = node.lineRange();
LinePosition startPos = lineRange.startLine();
LinePosition endPos = lineRange.endLine();
return new BLangDiagnosticLocation(currentCompUnitName,
startPos.line(),
endPos.line(),
startPos.offset(),
endPos.offset());
}
private Location getPosition(Node startNode, Node endNode) {
if (startNode == null || endNode == null) {
return null;
}
LinePosition startPos = startNode.lineRange().startLine();
LinePosition endPos = endNode.lineRange().endLine();
return new BLangDiagnosticLocation(currentCompUnitName, startPos.line(), endPos.line(),
startPos.offset(), endPos.offset());
}
private Location getPositionWithoutMetadata(Node node) {
if (node == null) {
return null;
}
LineRange nodeLineRange = node.lineRange();
NonTerminalNode nonTerminalNode = (NonTerminalNode) node;
ChildNodeList children = nonTerminalNode.children();
LinePosition startPos;
if (children.get(0).kind() == SyntaxKind.METADATA) {
startPos = children.get(1).lineRange().startLine();
} else {
startPos = nodeLineRange.startLine();
}
LinePosition endPos = nodeLineRange.endLine();
return new BLangDiagnosticLocation(currentCompUnitName,
startPos.line(),
endPos.line(),
startPos.offset(),
endPos.offset());
}
@Override
public BLangNode transform(ModulePartNode modulePart) {
BLangCompilationUnit compilationUnit = (BLangCompilationUnit) TreeBuilder.createCompilationUnit();
this.currentCompilationUnit = compilationUnit;
compilationUnit.name = currentCompUnitName;
compilationUnit.setPackageID(packageID);
Location pos = getPosition(modulePart);
for (ImportDeclarationNode importDecl : modulePart.imports()) {
BLangImportPackage bLangImport = (BLangImportPackage) importDecl.apply(this);
bLangImport.compUnit = this.createIdentifier(pos, compilationUnit.getName());
compilationUnit.addTopLevelNode(bLangImport);
}
for (ModuleMemberDeclarationNode member : modulePart.members()) {
compilationUnit.addTopLevelNode((TopLevelNode) member.apply(this));
}
Location newLocation = new BLangDiagnosticLocation(pos.lineRange().filePath(), 0, 0, 0, 0);
compilationUnit.pos = newLocation;
compilationUnit.setPackageID(packageID);
this.currentCompilationUnit = null;
return compilationUnit;
}
@Override
public BLangNode transform(ModuleVariableDeclarationNode modVarDeclrNode) {
TypedBindingPatternNode typedBindingPattern = modVarDeclrNode.typedBindingPattern();
BindingPatternNode bindingPatternNode = typedBindingPattern.bindingPattern();
BLangVariable variable = getBLangVariableNode(bindingPatternNode);
if (modVarDeclrNode.visibilityQualifier().isPresent()) {
markVariableWithFlag(variable, Flag.PUBLIC);
}
initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), modVarDeclrNode.initializer(),
modVarDeclrNode.qualifiers());
NodeList<AnnotationNode> annotations = getAnnotations(modVarDeclrNode.metadata());
if (annotations != null) {
variable.annAttachments = applyAll(annotations);
}
variable.pos = getPositionWithoutMetadata(modVarDeclrNode);
variable.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(modVarDeclrNode.metadata()));
return variable;
}
@Override
public BLangNode transform(ImportDeclarationNode importDeclaration) {
ImportOrgNameNode orgNameNode = importDeclaration.orgName().orElse(null);
Optional<ImportPrefixNode> prefixNode = importDeclaration.prefix();
Token prefix = prefixNode.isPresent() ? prefixNode.get().prefix() : null;
Token orgName = null;
if (orgNameNode != null) {
orgName = orgNameNode.orgName();
}
String version = null;
List<BLangIdentifier> pkgNameComps = new ArrayList<>();
NodeList<IdentifierToken> names = importDeclaration.moduleName();
Location position = getPosition(importDeclaration);
names.forEach(name -> pkgNameComps.add(this.createIdentifier(getPosition(name), name.text(), null)));
BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
importDcl.pos = position;
importDcl.pkgNameComps = pkgNameComps;
importDcl.orgName = this.createIdentifier(getPosition(orgNameNode), orgName);
importDcl.version = this.createIdentifier(null, version);
importDcl.alias = (prefix != null) ? this.createIdentifier(getPosition(prefix), prefix)
: pkgNameComps.get(pkgNameComps.size() - 1);
return importDcl;
}
@Override
public BLangNode transform(MethodDeclarationNode methodDeclarationNode) {
BLangFunction bLFunction;
if (methodDeclarationNode.relativeResourcePath().isEmpty()) {
bLFunction = createFunctionNode(methodDeclarationNode.methodName(),
methodDeclarationNode.qualifierList(), methodDeclarationNode.methodSignature(), null);
} else {
bLFunction = createResourceFunctionNode(methodDeclarationNode.methodName(),
methodDeclarationNode.qualifierList(), methodDeclarationNode.relativeResourcePath(),
methodDeclarationNode.methodSignature(), null);
}
bLFunction.annAttachments = applyAll(getAnnotations(methodDeclarationNode.metadata()));
bLFunction.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(methodDeclarationNode.metadata()));
bLFunction.pos = getPositionWithoutMetadata(methodDeclarationNode);
return bLFunction;
}
@Override
public BLangNode transform(ResourcePathParameterNode resourcePathParameterNode) {
BLangSimpleVariable pathParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
pathParam.name = createIdentifier(resourcePathParameterNode.paramName());
BLangType typeNode = (BLangType) resourcePathParameterNode.typeDescriptor().apply(this);
pathParam.pos = getPosition(resourcePathParameterNode);
pathParam.annAttachments = applyAll(resourcePathParameterNode.annotations());
if (resourcePathParameterNode.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) {
BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
arrayTypeNode.elemtype = typeNode;
arrayTypeNode.dimensions = 1;
typeNode = arrayTypeNode;
}
pathParam.typeNode = typeNode;
return pathParam;
}
private BLangFunction createResourceFunctionNode(IdentifierToken accessorName,
NodeList<Token> qualifierList,
NodeList<Node> relativeResourcePath,
FunctionSignatureNode methodSignature,
FunctionBodyNode functionBody) {
BLangResourceFunction bLFunction = (BLangResourceFunction) TreeBuilder.createResourceFunctionNode();
String resourceFuncName = calculateResourceFunctionName(accessorName, relativeResourcePath);
BLangIdentifier name = createIdentifier(getPosition(accessorName), resourceFuncName);
populateFunctionNode(name, qualifierList, methodSignature, functionBody, bLFunction);
bLFunction.methodName = createIdentifier(accessorName);
bLFunction.resourcePath = new ArrayList<>();
List<BLangSimpleVariable> params = new ArrayList<>();
for (Node pathSegment : relativeResourcePath) {
switch (pathSegment.kind()) {
case SLASH_TOKEN:
continue;
case RESOURCE_PATH_SEGMENT_PARAM:
BLangSimpleVariable param = (BLangSimpleVariable) pathSegment.apply(this);
params.add(param);
bLFunction.addPathParam(param);
bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "*"));
break;
case RESOURCE_PATH_REST_PARAM:
BLangSimpleVariable restParam = (BLangSimpleVariable) pathSegment.apply(this);
params.add(restParam);
bLFunction.setRestPathParam(restParam);
bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "**"));
break;
default:
bLFunction.resourcePath.add(createIdentifier((Token) pathSegment));
break;
}
}
bLFunction.getParameters().addAll(0, params);
return bLFunction;
}
private String calculateResourceFunctionName(IdentifierToken accessorName, NodeList<Node> relativeResourcePath) {
StringBuilder sb = new StringBuilder();
sb.append("$");
sb.append(createIdentifier(accessorName).getValue());
for (Node token : relativeResourcePath) {
switch (token.kind()) {
case SLASH_TOKEN:
continue;
case RESOURCE_PATH_SEGMENT_PARAM:
sb.append("$*");
break;
case RESOURCE_PATH_REST_PARAM:
sb.append("$**");
break;
default:
sb.append("$");
String value = createIdentifier((Token) token).getValue();
sb.append(value);
}
}
return sb.toString();
}
@Override
public BLangNode transform(ConstantDeclarationNode constantDeclarationNode) {
BLangConstant constantNode = (BLangConstant) TreeBuilder.createConstantNode();
Location pos = getPositionWithoutMetadata(constantDeclarationNode);
Location identifierPos = getPosition(constantDeclarationNode.variableName());
constantNode.name = createIdentifier(identifierPos, constantDeclarationNode.variableName());
constantNode.expr = createExpression(constantDeclarationNode.initializer());
constantNode.pos = pos;
if (constantDeclarationNode.typeDescriptor().isPresent()) {
constantNode.typeNode = createTypeNode(constantDeclarationNode.typeDescriptor().orElse(null));
}
constantNode.annAttachments = applyAll(getAnnotations(constantDeclarationNode.metadata()));
constantNode.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(constantDeclarationNode.metadata()));
constantNode.flagSet.add(Flag.CONSTANT);
if (constantDeclarationNode.visibilityQualifier().isPresent() &&
constantDeclarationNode.visibilityQualifier().orElse(null).kind() == SyntaxKind.PUBLIC_KEYWORD) {
constantNode.flagSet.add(Flag.PUBLIC);
}
NodeKind nodeKind = constantNode.expr.getKind();
if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
BLangLiteral literal = nodeKind == NodeKind.LITERAL ?
(BLangLiteral) TreeBuilder.createLiteralExpression() :
(BLangLiteral) TreeBuilder.createNumericLiteralExpression();
literal.setValue(((BLangLiteral) constantNode.expr).value);
literal.type = constantNode.expr.type;
literal.isConstant = true;
BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
finiteTypeNode.valueSpace.add(literal);
BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
IdentifierNode anonTypeGenName = createIdentifier(identifierPos, genName);
typeDef.setName(anonTypeGenName);
typeDef.flagSet.add(Flag.PUBLIC);
typeDef.flagSet.add(Flag.ANONYMOUS);
typeDef.typeNode = finiteTypeNode;
typeDef.pos = pos;
constantNode.associatedTypeDefinition = typeDef;
}
return constantNode;
}
public BLangNode transform(TypeDefinitionNode typeDefNode) {
BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
BLangIdentifier identifierNode =
this.createIdentifier(typeDefNode.typeName());
typeDef.setName(identifierNode);
typeDef.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(typeDefNode.metadata()));
typeDef.typeNode = createTypeNode(typeDefNode.typeDescriptor());
typeDefNode.visibilityQualifier().ifPresent(visibilityQual -> {
if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
typeDef.flagSet.add(Flag.PUBLIC);
}
});
typeDef.pos = getPositionWithoutMetadata(typeDefNode);
typeDef.annAttachments = applyAll(getAnnotations(typeDefNode.metadata()));
return typeDef;
}
@Override
public BLangNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {
List<TypeDescriptorNode> nodes = flattenUnionType(unionTypeDescriptorNode);
List<TypeDescriptorNode> finiteTypeElements = new ArrayList<>();
List<List<TypeDescriptorNode>> unionTypeElementsCollection = new ArrayList<>();
for (TypeDescriptorNode type : nodes) {
if (type.kind() == SyntaxKind.SINGLETON_TYPE_DESC) {
finiteTypeElements.add(type);
unionTypeElementsCollection.add(new ArrayList<>());
} else {
List<TypeDescriptorNode> lastOfOthers;
if (unionTypeElementsCollection.isEmpty()) {
lastOfOthers = new ArrayList<>();
unionTypeElementsCollection.add(lastOfOthers);
} else {
lastOfOthers = unionTypeElementsCollection.get(unionTypeElementsCollection.size() - 1);
}
lastOfOthers.add(type);
}
}
List<TypeDescriptorNode> unionElements = new ArrayList<>();
reverseFlatMap(unionTypeElementsCollection, unionElements);
BLangFiniteTypeNode bLangFiniteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
for (TypeDescriptorNode finiteTypeEl : finiteTypeElements) {
SingletonTypeDescriptorNode singletonTypeNode = (SingletonTypeDescriptorNode) finiteTypeEl;
BLangLiteral literal = createSimpleLiteral(singletonTypeNode.simpleContExprNode(), true);
bLangFiniteTypeNode.addValue(literal);
}
if (unionElements.isEmpty()) {
return bLangFiniteTypeNode;
}
BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
unionTypeNode.pos = getPosition(unionTypeDescriptorNode);
for (TypeDescriptorNode unionElement : unionElements) {
unionTypeNode.memberTypeNodes.add(createTypeNode(unionElement));
}
if (!finiteTypeElements.isEmpty()) {
unionTypeNode.memberTypeNodes.add(deSugarTypeAsUserDefType(bLangFiniteTypeNode));
}
return unionTypeNode;
}
private List<TypeDescriptorNode> flattenUnionType(UnionTypeDescriptorNode unionTypeDescriptorNode) {
List<TypeDescriptorNode> list = new ArrayList<>();
list.add(unionTypeDescriptorNode.leftTypeDesc());
while (unionTypeDescriptorNode.rightTypeDesc().kind() == SyntaxKind.UNION_TYPE_DESC) {
unionTypeDescriptorNode = (UnionTypeDescriptorNode) unionTypeDescriptorNode.rightTypeDesc();
list.add(unionTypeDescriptorNode.leftTypeDesc());
}
list.add(unionTypeDescriptorNode.rightTypeDesc());
return list;
}
private <T> void reverseFlatMap(List<List<T>> listOfLists, List<T> result) {
for (int i = listOfLists.size() - 1; i >= 0; i--) {
result.addAll(listOfLists.get(i));
}
}
private BLangUserDefinedType deSugarTypeAsUserDefType(BLangType toIndirect) {
BLangTypeDefinition bLTypeDef = createTypeDefinitionWithTypeNode(toIndirect);
Location pos = toIndirect.pos;
addToTop(bLTypeDef);
return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), bLTypeDef.name);
}
private BLangTypeDefinition createTypeDefinitionWithTypeNode(BLangType toIndirect) {
Location pos = toIndirect.pos;
BLangTypeDefinition bLTypeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
IdentifierNode anonTypeGenName = createIdentifier(symTable.builtinPos, genName);
bLTypeDef.setName(anonTypeGenName);
bLTypeDef.flagSet.add(Flag.PUBLIC);
bLTypeDef.flagSet.add(Flag.ANONYMOUS);
bLTypeDef.typeNode = toIndirect;
bLTypeDef.pos = pos;
return bLTypeDef;
}
@Override
public BLangNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) {
BLangType typeNode = createTypeNode(parenthesisedTypeDescriptorNode.typedesc());
typeNode.grouped = true;
return typeNode;
}
@Override
public BLangNode transform(TypeParameterNode typeParameterNode) {
return createTypeNode(typeParameterNode.typeNode());
}
@Override
public BLangNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) {
BLangTupleTypeNode tupleTypeNode = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode();
SeparatedNodeList<Node> types = tupleTypeDescriptorNode.memberTypeDesc();
for (int i = 0; i < types.size(); i++) {
Node node = types.get(i);
if (node.kind() == SyntaxKind.REST_TYPE) {
RestDescriptorNode restDescriptor = (RestDescriptorNode) node;
tupleTypeNode.restParamType = createTypeNode(restDescriptor.typeDescriptor());
} else {
tupleTypeNode.memberTypeNodes.add(createTypeNode(node));
}
}
tupleTypeNode.pos = getPosition(tupleTypeDescriptorNode);
return tupleTypeNode;
}
@Override
private boolean isAnonymousTypeNode(TypeParameterNode typeNode) {
SyntaxKind paramKind = typeNode.typeNode().kind();
if (paramKind == SyntaxKind.RECORD_TYPE_DESC || paramKind == SyntaxKind.OBJECT_TYPE_DESC
|| paramKind == SyntaxKind.ERROR_TYPE_DESC) {
return checkIfAnonymous(typeNode);
}
return false;
}
@Override
public BLangNode transform(DistinctTypeDescriptorNode distinctTypeDesc) {
BLangType typeNode = createTypeNode(distinctTypeDesc.typeDescriptor());
typeNode.flagSet.add(Flag.DISTINCT);
return typeNode;
}
@Override
public BLangNode transform(ObjectTypeDescriptorNode objTypeDescNode) {
BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
for (Token qualifier : objTypeDescNode.objectTypeQualifiers()) {
SyntaxKind kind = qualifier.kind();
if (kind == SyntaxKind.CLIENT_KEYWORD) {
objectTypeNode.flagSet.add(Flag.CLIENT);
continue;
}
if (kind == SyntaxKind.SERVICE_KEYWORD) {
objectTypeNode.flagSet.add(SERVICE);
continue;
}
if (kind == SyntaxKind.ISOLATED_KEYWORD) {
objectTypeNode.flagSet.add(ISOLATED);
continue;
}
throw new RuntimeException("Syntax kind is not supported: " + kind);
}
NodeList<Node> members = objTypeDescNode.members();
for (Node node : members) {
BLangNode bLangNode = node.apply(this);
if (bLangNode.getKind() == NodeKind.FUNCTION) {
BLangFunction bLangFunction = (BLangFunction) bLangNode;
bLangFunction.attachedFunction = true;
bLangFunction.flagSet.add(Flag.ATTACHED);
if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
if (objectTypeNode.initFunction == null) {
bLangFunction.objInitFunction = true;
objectTypeNode.initFunction = bLangFunction;
} else {
objectTypeNode.addFunction(bLangFunction);
}
} else {
objectTypeNode.addFunction(bLangFunction);
}
} else if (bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
BLangFunction bLangFunction = (BLangFunction) bLangNode;
bLangFunction.attachedFunction = true;
bLangFunction.flagSet.add(Flag.ATTACHED);
objectTypeNode.addFunction(bLangFunction);
dlog.error(getPosition(node), DiagnosticErrorCode.OBJECT_TYPE_DEF_DOES_NOT_ALLOW_RESOURCE_FUNC_DECL);
} else if (bLangNode.getKind() == NodeKind.VARIABLE) {
objectTypeNode.addField((BLangSimpleVariable) bLangNode);
} else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
objectTypeNode.addTypeReference((BLangType) bLangNode);
}
}
objectTypeNode.pos = getPosition(objTypeDescNode);
if (members.size() > 0) {
objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(members.get(0)));
objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(members.get(members.size() - 1)));
} else {
objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(objTypeDescNode.closeBrace()));
objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(objTypeDescNode.openBrace()));
}
boolean isAnonymous = checkIfAnonymous(objTypeDescNode);
objectTypeNode.isAnonymous = isAnonymous;
if (!isAnonymous) {
return objectTypeNode;
}
return deSugarTypeAsUserDefType(objectTypeNode);
}
/**
 * Builds an anonymous class definition from the member list of an object-constructor expression.
 * The first parameter-less {@code init} method becomes the class's init function; any further
 * {@code init} methods are attached as ordinary member functions. Type-reference members are
 * rejected with a diagnostic, per OBJECT_CTOR_DOES_NOT_SUPPORT_TYPE_REFERENCE_MEMBERS.
 *
 * @param members the member nodes of the object-constructor body
 * @return the generated anonymous class definition (marked internal)
 */
public BLangClassDefinition transformObjectCtorExpressionBody(NodeList<Node> members) {
    BLangClassDefinition classDefinition = (BLangClassDefinition) TreeBuilder.createClassDefNode();
    classDefinition.flagSet.add(Flag.ANONYMOUS);
    classDefinition.flagSet.add(Flag.OBJECT_CTOR);

    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        NodeKind nodeKind = bLangNode.getKind();
        if (nodeKind == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (!Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                classDefinition.addFunction(bLangFunction);
                continue;
            }
            // An init function is already recorded; attach this duplicate as a regular function.
            if (classDefinition.initFunction != null) {
                classDefinition.addFunction(bLangFunction);
                continue;
            }
            // The init method of an object constructor must not declare parameters.
            if (bLangFunction.requiredParams.size() != 0) {
                dlog.error(bLangFunction.pos, DiagnosticErrorCode.OBJECT_CTOR_INIT_CANNOT_HAVE_PARAMETERS);
                continue;
            }
            bLangFunction.objInitFunction = true;
            classDefinition.initFunction = bLangFunction;
        } else if (nodeKind == NodeKind.VARIABLE) {
            classDefinition.addField((BLangSimpleVariable) bLangNode);
        } else if (nodeKind == NodeKind.USER_DEFINED_TYPE) {
            // Type-reference members are not supported inside object constructors.
            dlog.error(bLangNode.pos, DiagnosticErrorCode.OBJECT_CTOR_DOES_NOT_SUPPORT_TYPE_REFERENCE_MEMBERS);
        }
    }
    classDefinition.internal = true;
    return classDefinition;
}
/**
 * An object constructor expression creates a class definition for the type defined through the
 * object constructor, and that class definition is added as a top-level node. The object defined
 * by the constructor is then initialized using the generated class, so this transformation can be
 * considered a desugaring.
 * Example:
 * var objVariable = object { int n; };
 *
 * becomes:
 * class anonType0 { int n; }
 * var objVariable = new anonType0();
 *
 * @param objectConstructorExpressionNode object constructor expression node
 * @return BLangTypeInit node which initializes the generated class definition
 */
@Override
public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) {
    Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode);
    BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members());
    anonClass.pos = pos;

    BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression();
    objectCtorExpression.pos = pos;
    objectCtorExpression.classNode = anonClass;

    // Name the generated class with a fresh anonymous-type key and hoist it to the top level.
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
    anonClass.setName(anonTypeGenName);
    anonClass.flagSet.add(Flag.PUBLIC);

    Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference();
    typeReference.ifPresent(typeReferenceNode -> {
        objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode));
    });

    anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations());
    addToTop(anonClass);

    // Propagate object-type qualifiers (client/isolated/service) onto the class definition;
    // anonClass is mutated after addToTop, which is fine since the same instance was added.
    NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers();
    for (Token qualifier : objectConstructorQualifierList) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            anonClass.flagSet.add(Flag.CLIENT);
            objectCtorExpression.isClient = true;
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            anonClass.flagSet.add(Flag.ISOLATED);
        } else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
            anonClass.flagSet.add(SERVICE);
            objectCtorExpression.isService = true;
        } else {
            throw new RuntimeException("Syntax kind is not supported: " + kind);
        }
    }

    // Desugar the constructor expression into `new <anonClass>()`.
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name);

    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = pos;
    initNode.userDefinedType = userDefinedType;

    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier pkgAlias = createIdentifier(pos, "");
    BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));

    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;

    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;

    objectCtorExpression.typeInit = initNode;
    return objectCtorExpression;
}
@Override
public BLangNode transform(ObjectFieldNode objFieldNode) {
    // Build the field variable from its name, type, optional initializer, visibility and annotations.
    BLangSimpleVariable fieldVar = createSimpleVar(objFieldNode.fieldName(), objFieldNode.typeName(),
            objFieldNode.expression().orElse(null), objFieldNode.visibilityQualifier().orElse(null),
            getAnnotations(objFieldNode.metadata()));

    // Attach markdown documentation, if any.
    Optional<Node> documentation = getDocumentationString(objFieldNode.metadata());
    fieldVar.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(documentation);

    // Only the `final` and `resource` qualifiers are handled here; other tokens are ignored.
    for (Token qualifier : objFieldNode.qualifierList()) {
        SyntaxKind qualifierKind = qualifier.kind();
        if (qualifierKind == SyntaxKind.FINAL_KEYWORD) {
            addFinalQualifier(fieldVar);
        } else if (qualifierKind == SyntaxKind.RESOURCE_KEYWORD) {
            addResourceQualifier(fieldVar);
        }
    }

    fieldVar.flagSet.add(Flag.FIELD);
    fieldVar.pos = getPositionWithoutMetadata(objFieldNode);
    return fieldVar;
}
// Marks the given field variable with the RESOURCE flag.
private void addResourceQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.RESOURCE);
}
@Override
public BLangNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) {
    // An expression-bodied function (`=> expr`) wraps its single expression in an expr-body node.
    BLangExprFunctionBody exprBody = (BLangExprFunctionBody) TreeBuilder.createExprFunctionBodyNode();
    exprBody.pos = getPosition(expressionFunctionBodyNode);
    exprBody.expr = createExpression(expressionFunctionBodyNode.expression());
    return exprBody;
}
@Override
public BLangNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
    BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
    boolean isAnonymous = checkIfAnonymous(recordTypeDescriptorNode);

    // Plain fields and fields with default values are handled identically apart from the
    // metadata access; anything else is a type reference (record inclusion).
    for (Node field : recordTypeDescriptorNode.fields()) {
        SyntaxKind fieldKind = field.kind();
        if (fieldKind == SyntaxKind.RECORD_FIELD || fieldKind == SyntaxKind.RECORD_FIELD_WITH_DEFAULT_VALUE) {
            BLangSimpleVariable recordField = (BLangSimpleVariable) field.apply(this);
            Optional<Node> doc = fieldKind == SyntaxKind.RECORD_FIELD
                    ? getDocumentationString(((RecordFieldNode) field).metadata())
                    : getDocumentationString(((RecordFieldWithDefaultValueNode) field).metadata());
            recordField.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
            recordTypeNode.fields.add(recordField);
        } else {
            recordTypeNode.addTypeReference(createTypeNode(field));
        }
    }

    Optional<RecordRestDescriptorNode> restDescriptor = recordTypeDescriptorNode.recordRestDescriptor();
    boolean hasRestField = restDescriptor.isPresent();
    if (hasRestField) {
        recordTypeNode.restFieldType = createTypeNode(restDescriptor.get());
    }

    // A record is sealed only when it has neither a rest field nor an open `{` body delimiter.
    boolean isOpen = recordTypeDescriptorNode.bodyStartDelimiter().kind() == SyntaxKind.OPEN_BRACE_TOKEN;
    recordTypeNode.sealed = !(hasRestField || isOpen);
    recordTypeNode.pos = getPosition(recordTypeDescriptorNode);
    recordTypeNode.isAnonymous = isAnonymous;
    recordTypeNode.isLocal = this.isInLocalContext;

    // Named records and records in local scope are returned as-is; anonymous module-level
    // records are desugared into a generated type definition.
    if (!isAnonymous || this.isInLocalContext) {
        return recordTypeNode;
    }
    return createAnonymousRecordType(recordTypeDescriptorNode, recordTypeNode);
}
@Override
public BLangNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
    // A singleton type is a finite type whose value space holds exactly one literal.
    BLangFiniteTypeNode bLangFiniteTypeNode = new BLangFiniteTypeNode();
    BLangLiteral simpleLiteral = createSimpleLiteral(singletonTypeDescriptorNode.simpleContExprNode());
    bLangFiniteTypeNode.valueSpace.add(simpleLiteral);
    // NOTE(review): unlike sibling transforms, no position is assigned to the returned node —
    // confirm whether bLangFiniteTypeNode.pos should be set here.
    return bLangFiniteTypeNode;
}
/**
 * Transforms a built-in simple name reference (e.g. {@code int}, {@code string}) into a type node.
 *
 * @param builtinSimpleNameReferenceNode the built-in type reference to transform
 * @return the corresponding type node
 */
@Override
public BLangNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) {
    // Parameter renamed from the copy-pasted "singletonTypeDescriptorNode" to match its type.
    return createTypeNode(builtinSimpleNameReferenceNode);
}
@Override
public BLangNode transform(TypeReferenceNode typeReferenceNode) {
    // A type reference is represented by the type node built from the referenced type name.
    return createTypeNode(typeReferenceNode.typeName());
}
@Override
public BLangNode transform(RecordFieldNode recordFieldNode) {
    // Record fields are always public; a trailing "?" marks a field optional, otherwise required.
    BLangSimpleVariable fieldVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    fieldVar.flagSet.add(Flag.PUBLIC);

    Flag presenceFlag = recordFieldNode.questionMarkToken().isPresent() ? Flag.OPTIONAL : Flag.REQUIRED;
    fieldVar.flagSet.add(presenceFlag);
    fieldVar.flagSet.add(Flag.FIELD);

    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), fieldVar);
    fieldVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return fieldVar;
}
@Override
public BLangNode transform(RecordFieldWithDefaultValueNode recordFieldNode) {
    // Same as a plain record field, plus the default-value expression as the initializer.
    BLangSimpleVariable fieldVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    fieldVar.flagSet.add(Flag.PUBLIC);

    if (isPresent(recordFieldNode.expression())) {
        fieldVar.setInitialExpression(createExpression(recordFieldNode.expression()));
    }

    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), fieldVar);
    fieldVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return fieldVar;
}
// Adds the READONLY flag to the variable when the readonly keyword is present.
private void addReadOnlyQualifier(Optional<Token> readonlyKeyword, BLangSimpleVariable simpleVar) {
    readonlyKeyword.ifPresent(keyword -> simpleVar.flagSet.add(Flag.READONLY));
}
@Override
public BLangNode transform(RecordRestDescriptorNode recordFieldNode) {
    // The rest descriptor is represented by the type node of its declared type name.
    return createTypeNode(recordFieldNode.typeName());
}
@Override
public BLangNode transform(FunctionDefinitionNode funcDefNode) {
    // A non-empty relative resource path marks a resource function; otherwise it is ordinary.
    boolean isResourceFunction = !funcDefNode.relativeResourcePath().isEmpty();
    BLangFunction function = isResourceFunction
            ? createResourceFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                    funcDefNode.relativeResourcePath(), funcDefNode.functionSignature(),
                    funcDefNode.functionBody())
            : createFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                    funcDefNode.functionSignature(), funcDefNode.functionBody());

    function.annAttachments = applyAll(getAnnotations(funcDefNode.metadata()));
    function.pos = getPositionWithoutMetadata(funcDefNode);
    function.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(funcDefNode.metadata()));
    return function;
}
/**
 * Creates a (non-resource) function node from its name token, qualifiers, signature and body.
 */
private BLangFunction createFunctionNode(IdentifierToken funcName, NodeList<Token> qualifierList,
        FunctionSignatureNode functionSignature, FunctionBodyNode functionBody) {
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    // Function name
    BLangIdentifier name = createIdentifier(getPosition(funcName), funcName);
    populateFunctionNode(name, qualifierList, functionSignature, functionBody, bLFunction);
    return bLFunction;
}
/**
 * Fills in the name, qualifier flags, signature and body of {@code bLFunction}.
 * A {@code null} body marks the function as a body-less interface declaration; an external
 * body additionally marks the function as NATIVE.
 */
private void populateFunctionNode(BLangIdentifier name, NodeList<Token> qualifierList,
        FunctionSignatureNode functionSignature, FunctionBodyNode functionBody,
        BLangFunction bLFunction) {
    bLFunction.name = name;
    setFunctionQualifiers(bLFunction, qualifierList);
    populateFuncSignature(bLFunction, functionSignature);

    if (functionBody == null) {
        bLFunction.body = null;
        bLFunction.flagSet.add(Flag.INTERFACE);
        bLFunction.interfaceFunction = true;
    } else {
        bLFunction.body = (BLangFunctionBody) functionBody.apply(this);
        if (bLFunction.body.getKind() == NodeKind.EXTERN_FUNCTION_BODY) {
            bLFunction.flagSet.add(Flag.NATIVE);
        }
    }
}
// Maps each recognized qualifier token onto the corresponding function flag.
// Unrecognized qualifier tokens are silently ignored.
private void setFunctionQualifiers(BLangFunction bLFunction, NodeList<Token> qualifierList) {
    for (Token qualifier : qualifierList) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.PUBLIC_KEYWORD) {
            bLFunction.flagSet.add(Flag.PUBLIC);
        } else if (kind == SyntaxKind.PRIVATE_KEYWORD) {
            bLFunction.flagSet.add(Flag.PRIVATE);
        } else if (kind == SyntaxKind.REMOTE_KEYWORD) {
            bLFunction.flagSet.add(Flag.REMOTE);
        } else if (kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            bLFunction.flagSet.add(Flag.TRANSACTIONAL);
        } else if (kind == SyntaxKind.RESOURCE_KEYWORD) {
            bLFunction.flagSet.add(Flag.RESOURCE);
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            bLFunction.flagSet.add(Flag.ISOLATED);
        }
    }
}
@Override
public BLangNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) {
    // An external body carries only annotations; the implementation lives outside Ballerina.
    BLangExternalFunctionBody externBody =
            (BLangExternalFunctionBody) TreeBuilder.createExternFunctionBodyNode();
    externBody.annAttachments = applyAll(externalFunctionBodyNode.annotations());
    return externBody;
}
@Override
public BLangNode transform(ExplicitAnonymousFunctionExpressionNode anonFuncExprNode) {
    // Builds the underlying function with a generated anonymous name, hoists it to the top
    // level, and returns a lambda expression that references it.
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location pos = getPosition(anonFuncExprNode);

    // Generated function name.
    bLFunction.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));

    // Function signature.
    populateFuncSignature(bLFunction, anonFuncExprNode.functionSignature());

    // Function body.
    bLFunction.body = (BLangFunctionBody) anonFuncExprNode.functionBody().apply(this);

    bLFunction.pos = pos;

    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);

    setFunctionQualifiers(bLFunction, anonFuncExprNode.qualifierList());

    // Hoist before creating the referencing lambda expression.
    addToTop(bLFunction);

    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = pos;
    return lambdaExpr;
}
@Override
public BLangNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
    BLangBlockFunctionBody bLFuncBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    // Statements in a function body are transformed in "local context" (e.g. affects how
    // record type descriptors are handled).
    // NOTE(review): the flag is not restored on exception paths — confirm this is acceptable.
    this.isInLocalContext = true;
    List<BLangStatement> statements = new ArrayList<>();

    if (functionBodyBlockNode.namedWorkerDeclarator().isPresent()) {
        NamedWorkerDeclarator namedWorkerDeclarator = functionBodyBlockNode.namedWorkerDeclarator().get();
        generateAndAddBLangStatements(namedWorkerDeclarator.workerInitStatements(), statements);

        for (NamedWorkerDeclarationNode workerDeclarationNode : namedWorkerDeclarator.namedWorkerDeclarations()) {
            statements.add((BLangStatement) workerDeclarationNode.apply(this));
            // Worker transforms push extra statements (worker invocations); drain them here.
            while (!this.additionalStatements.empty()) {
                statements.add(additionalStatements.pop());
            }
        }
    }

    generateAndAddBLangStatements(functionBodyBlockNode.statements(), statements);

    bLFuncBody.stmts = statements;
    bLFuncBody.pos = getPosition(functionBodyBlockNode);
    this.isInLocalContext = false;
    return bLFuncBody;
}
@Override
public BLangNode transform(ForEachStatementNode forEachStatementNode) {
    BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
    foreach.pos = getPosition(forEachStatementNode);
    TypedBindingPatternNode typedBindingPatternNode = forEachStatementNode.typedBindingPattern();
    // Loop variable definition, built without an initializer or final-ness.
    VariableDefinitionNode variableDefinitionNode = createBLangVarDef(getPosition(typedBindingPatternNode),
            typedBindingPatternNode, Optional.empty(), Optional.empty());
    foreach.setVariableDefinitionNode(variableDefinitionNode);
    // `foreach var x in ...` — the loop variable's type is inferred.
    foreach.isDeclaredWithVar = typedBindingPatternNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;

    BLangBlockStmt foreachBlock = (BLangBlockStmt) forEachStatementNode.blockStatement().apply(this);
    foreachBlock.pos = getPosition(forEachStatementNode.blockStatement());
    foreach.setBody(foreachBlock);
    foreach.setCollection(createExpression(forEachStatementNode.actionOrExpressionNode()));

    // Optional `on fail` clause attached to the loop.
    forEachStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        foreach.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return foreach;
}
@Override
public BLangNode transform(ForkStatementNode forkStatementNode) {
    // Only the fork-join node and its position are materialized here; the workers declared
    // inside the fork are transformed separately (see transform(NamedWorkerDeclarationNode)).
    BLangForkJoin forkJoin = (BLangForkJoin) TreeBuilder.createForkJoinNode();
    forkJoin.pos = getPosition(forkStatementNode);
    return forkJoin;
}
@Override
public BLangNode transform(NamedWorkerDeclarationNode namedWorkerDeclNode) {
    // A named worker is desugared into: (1) an anonymous worker-lambda function hoisted to the
    // top level, (2) a variable definition binding that lambda (returned from this method), and
    // (3) an async invocation of the lambda bound to the worker's name, pushed onto
    // additionalStatements for the enclosing transform to drain.
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location workerBodyPos = getPosition(namedWorkerDeclNode.workerBody());

    // Generated anonymous name for the worker function.
    bLFunction.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));

    // The worker body becomes the function body.
    BLangBlockStmt blockStmt = (BLangBlockStmt) namedWorkerDeclNode.workerBody().apply(this);
    BLangBlockFunctionBody bodyNode = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    bodyNode.stmts = blockStmt.stmts;
    bodyNode.pos = workerBodyPos;
    bLFunction.body = bodyNode;
    bLFunction.internal = true;

    bLFunction.pos = workerBodyPos;

    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);
    bLFunction.addFlag(Flag.WORKER);

    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        bLFunction.addFlag(Flag.TRANSACTIONAL);
    }

    // Resolve the worker name, generating one for missing names and unescaping quoted
    // identifiers (IDENTIFIER_LITERAL_PREFIX).
    String workerName;
    if (namedWorkerDeclNode.workerName().isMissing()) {
        workerName = missingNodesHelper.getNextMissingNodeName(packageID);
    } else {
        workerName = namedWorkerDeclNode.workerName().text();
    }

    if (workerName.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
        bLFunction.defaultWorkerName.originalValue = workerName;
        workerName = IdentifierUtils.unescapeUnicodeCodepoints(workerName.substring(1));
    }

    bLFunction.defaultWorkerName.value = workerName;
    bLFunction.defaultWorkerName.pos = getPosition(namedWorkerDeclNode.workerName());

    NodeList<AnnotationNode> annotations = namedWorkerDeclNode.annotations();
    bLFunction.annAttachments = applyAll(annotations);

    // Return type: the declared type if present, otherwise nil.
    Optional<Node> retNode = namedWorkerDeclNode.returnTypeDesc();
    if (retNode.isPresent()) {
        ReturnTypeDescriptorNode returnType = (ReturnTypeDescriptorNode) retNode.get();
        bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
    } else {
        BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
        bLValueType.pos = getPosition(namedWorkerDeclNode);
        bLValueType.typeKind = TypeKind.NIL;
        bLFunction.setReturnTypeNode(bLValueType);
    }

    addToTop(bLFunction);

    // Wrap the hoisted function in a lambda expression.
    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = workerBodyPos;
    lambdaExpr.internal = true;

    // Bind the lambda to a synthetic final variable (worker-lambda prefix + worker name).
    String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;

    Location workerNamePos = getPosition(namedWorkerDeclNode.workerName());
    BLangSimpleVariable var = new SimpleVarBuilder()
            .with(workerLambdaName, workerNamePos)
            .setExpression(lambdaExpr)
            .isDeclaredWithVar()
            .isFinal()
            .build();

    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        var.addFlag(Flag.TRANSACTIONAL);
    }

    BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    lamdaWrkr.pos = workerBodyPos;
    var.pos = workerBodyPos;
    lamdaWrkr.setVariable(var);
    lamdaWrkr.isWorker = true;
    lamdaWrkr.internal = var.internal = true;
    // Workers declared directly inside a fork statement are flagged accordingly.
    if (namedWorkerDeclNode.parent().kind() == SyntaxKind.FORK_STATEMENT) {
        lamdaWrkr.isInFork = true;
        lamdaWrkr.var.flagSet.add(Flag.FORKED);
    }

    // Build the async invocation of the worker lambda.
    BLangInvocation bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
    BLangIdentifier nameInd = this.createIdentifier(workerNamePos, workerLambdaName);
    BLangNameReference reference = new BLangNameReference(workerNamePos, null, TreeBuilder.createIdentifierNode(),
            nameInd);
    bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
    bLInvocation.name = (BLangIdentifier) reference.name;
    bLInvocation.pos = workerNamePos;
    bLInvocation.flagSet = new HashSet<>();
    bLInvocation.annAttachments = bLFunction.annAttachments;

    if (bLInvocation.getKind() == NodeKind.INVOCATION) {
        bLInvocation.async = true;
    } else {
        dlog.error(workerBodyPos, DiagnosticErrorCode.START_REQUIRE_INVOCATION);
    }

    // Bind the result of the async invocation to the worker's own name.
    BLangSimpleVariable invoc = new SimpleVarBuilder()
            .with(workerName, workerNamePos)
            .isDeclaredWithVar()
            .isWorkerVar()
            .setExpression(bLInvocation)
            .isFinal()
            .setPos(workerNamePos)
            .build();

    BLangSimpleVariableDef workerInvoc = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    workerInvoc.pos = workerNamePos;
    workerInvoc.setVariable(invoc);
    workerInvoc.isWorker = true;
    invoc.flagSet.add(Flag.WORKER);
    // Emitted after the lambda definition by the enclosing function-body transform.
    this.additionalStatements.push(workerInvoc);

    return lamdaWrkr;
}
/**
 * Applies this transformer to every node in {@code annotations} and collects the results.
 * Returns an empty list when {@code annotations} is {@code null}.
 *
 * @param annotations the syntax nodes to transform; may be {@code null}
 * @return the transformed nodes, in input order
 */
@SuppressWarnings("unchecked") // Callers are expected to pass node lists whose transforms yield A.
private <A extends BLangNode, B extends Node> List<A> applyAll(NodeList<B> annotations) {
    ArrayList<A> annAttachments = new ArrayList<>();
    if (annotations == null) {
        return annAttachments;
    }

    for (B annotation : annotations) {
        A blNode = (A) annotation.apply(this);
        annAttachments.add(blNode);
    }
    return annAttachments;
}
@Override
public BLangNode transform(AnnotationNode annotation) {
    BLangAnnotationAttachment attachment =
            (BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();

    // The annotation value, when given, is a mapping constructor expression.
    annotation.annotValue().ifPresent(mappingValue -> {
        BLangExpression valueExpr = (BLangExpression) mappingValue.apply(this);
        attachment.setExpression(valueExpr);
    });

    BLangNameReference nameReference = createBLangNameReference(annotation.annotReference());
    attachment.setAnnotationName(nameReference.name);
    attachment.setPackageAlias(nameReference.pkgAlias);
    attachment.pos = getPosition(annotation);
    return attachment;
}
@Override
public BLangNode transform(QueryActionNode queryActionNode) {
    BLangQueryAction bLQueryAction = (BLangQueryAction) TreeBuilder.createQueryActionNode();
    BLangDoClause doClause = (BLangDoClause) TreeBuilder.createDoClauseNode();
    doClause.body = (BLangBlockStmt) queryActionNode.blockStatement().apply(this);
    // Extend the do-clause body position left to include the `do` keyword itself.
    doClause.body.pos = expandLeft(doClause.body.pos, getPosition(queryActionNode.doKeyword()));
    doClause.pos = doClause.body.pos;
    // Clause order: from-clause, intermediate clauses, then the do-clause.
    bLQueryAction.queryClauseList.add(queryActionNode.queryPipeline().fromClause().apply(this));
    bLQueryAction.queryClauseList.addAll(applyAll(queryActionNode.queryPipeline().intermediateClauses()));
    bLQueryAction.queryClauseList.add(doClause);
    bLQueryAction.doClause = doClause;
    bLQueryAction.pos = getPosition(queryActionNode);
    return bLQueryAction;
}
@Override
public BLangNode transform(AnnotationDeclarationNode annotationDeclarationNode) {
    BLangAnnotation annotationDecl = (BLangAnnotation) TreeBuilder.createAnnotationNode();
    Location pos = getPositionWithoutMetadata(annotationDeclarationNode);
    annotationDecl.pos = pos;
    annotationDecl.name = createIdentifier(annotationDeclarationNode.annotationTag());

    if (annotationDeclarationNode.visibilityQualifier().isPresent()) {
        annotationDecl.addFlag(Flag.PUBLIC);
    }

    if (annotationDeclarationNode.constKeyword().isPresent()) {
        annotationDecl.addFlag(Flag.CONSTANT);
    }

    annotationDecl.annAttachments = applyAll(getAnnotations(annotationDeclarationNode.metadata()));

    annotationDecl.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(annotationDeclarationNode.metadata()));

    // Optional type descriptor constraining annotation values.
    Optional<Node> typedesc = annotationDeclarationNode.typeDescriptor();
    if (typedesc.isPresent()) {
        annotationDecl.typeNode = createTypeNode(typedesc.get());
    }

    // Translate each attach point. Multi-token points (`object function`, `object field`,
    // service points) need special handling; all others map from the first identifier's text.
    SeparatedNodeList<Node> paramList = annotationDeclarationNode.attachPoints();
    for (Node child : paramList) {
        AnnotationAttachPointNode attachPoint = (AnnotationAttachPointNode) child;
        boolean source = attachPoint.sourceKeyword().isPresent();
        AttachPoint bLAttachPoint;
        NodeList<Token> idents = attachPoint.identifiers();
        Token firstIndent = idents.get(0);

        switch (firstIndent.kind()) {
            case OBJECT_KEYWORD:
                Token secondIndent = idents.get(1);
                switch (secondIndent.kind()) {
                    case FUNCTION_KEYWORD:
                        bLAttachPoint =
                                AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_METHOD.getValue(), source);
                        break;
                    case FIELD_KEYWORD:
                        bLAttachPoint =
                                AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_FIELD.getValue(), source);
                        break;
                    default:
                        throw new RuntimeException("Syntax kind is not supported: " + secondIndent.kind());
                }
                break;
            case SERVICE_KEYWORD:
                String value;
                if (idents.size() == 1) {
                    value = AttachPoint.Point.SERVICE.getValue();
                } else if (idents.size() == 3) {
                    // Three identifiers map to the service-remote attach point
                    // (presumably `service remote function` — confirm against the grammar).
                    value = AttachPoint.Point.SERVICE_REMOTE.getValue();
                } else {
                    throw new RuntimeException("Invalid annotation attach point");
                }
                bLAttachPoint = AttachPoint.getAttachmentPoint(value, source);
                break;
            case RECORD_KEYWORD:
                bLAttachPoint = AttachPoint.getAttachmentPoint(AttachPoint.Point.RECORD_FIELD.getValue(), source);
                break;
            default:
                bLAttachPoint = AttachPoint.getAttachmentPoint(firstIndent.text(), source);
        }
        annotationDecl.addAttachPoint(bLAttachPoint);
    }

    return annotationDecl;
}
@Override
public BLangNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) {
    BLangAnnotAccessExpr annotAccessExpr = (BLangAnnotAccessExpr) TreeBuilder.createAnnotAccessExpressionNode();
    Node annotTagReference = annotAccessExpressionNode.annotTagReference();

    if (annotTagReference.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // Unqualified annotation name: the package alias is left empty.
        SimpleNameReferenceNode simpleName = (SimpleNameReferenceNode) annotTagReference;
        annotAccessExpr.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        annotAccessExpr.annotationName = createIdentifier(simpleName.name());
    } else {
        // Qualified reference: the module prefix becomes the package alias.
        QualifiedNameReferenceNode qualifiedName = (QualifiedNameReferenceNode) annotTagReference;
        annotAccessExpr.pkgAlias = createIdentifier(qualifiedName.modulePrefix());
        annotAccessExpr.annotationName = createIdentifier(qualifiedName.identifier());
    }

    annotAccessExpr.pos = getPosition(annotAccessExpressionNode);
    annotAccessExpr.expr = createExpression(annotAccessExpressionNode.expression());
    return annotAccessExpr;
}
@Override
public BLangNode transform(ConditionalExpressionNode conditionalExpressionNode) {
    BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) TreeBuilder.createTernaryExpressionNode();
    ternaryExpr.pos = getPosition(conditionalExpressionNode);
    ternaryExpr.elseExpr = createExpression(conditionalExpressionNode.endExpression());
    ternaryExpr.thenExpr = createExpression(conditionalExpressionNode.middleExpression());
    ternaryExpr.expr = createExpression(conditionalExpressionNode.lhsExpression());
    if (ternaryExpr.expr.getKind() == NodeKind.TERNARY_EXPR) {
        // Re-associate when the condition itself parsed as a ternary: walk to the innermost
        // else-branch of the existing chain, splice this ternary in there, and return the
        // original root so the nesting matches the intended evaluation order.
        BLangTernaryExpr root = (BLangTernaryExpr) ternaryExpr.expr;
        BLangTernaryExpr parent = root;
        while (parent.elseExpr.getKind() == NodeKind.TERNARY_EXPR) {
            parent = (BLangTernaryExpr) parent.elseExpr;
        }
        ternaryExpr.expr = parent.elseExpr;
        parent.elseExpr = ternaryExpr;
        ternaryExpr = root;
    }
    return ternaryExpr;
}
@Override
public BLangNode transform(CheckExpressionNode checkExpressionNode) {
    // `check` and `checkpanic` share this node type; the keyword decides which node to build.
    Location pos = getPosition(checkExpressionNode);
    BLangExpression checkedExpr = createExpression(checkExpressionNode.expression());
    boolean isCheck = checkExpressionNode.checkKeyword().kind() == SyntaxKind.CHECK_KEYWORD;
    if (isCheck) {
        return createCheckExpr(pos, checkedExpr);
    }
    return createCheckPanickedExpr(pos, checkedExpr);
}
@Override
public BLangNode transform(TypeTestExpressionNode typeTestExpressionNode) {
    // `expr is Type` — build a type-test node from the operand expression and the tested type.
    BLangTypeTestExpr typeTest = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
    typeTest.expr = createExpression(typeTestExpressionNode.expression());
    typeTest.typeNode = createTypeNode(typeTestExpressionNode.typeDescriptor());
    typeTest.pos = getPosition(typeTestExpressionNode);
    return typeTest;
}
@Override
public BLangNode transform(MappingConstructorExpressionNode mapConstruct) {
    BLangRecordLiteral bLiteralNode = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (MappingFieldNode field : mapConstruct.fields()) {
        if (field.kind() == SyntaxKind.SPREAD_FIELD) {
            // Spread-operator field: `...expr`.
            SpreadFieldNode spreadFieldNode = (SpreadFieldNode) field;
            BLangRecordSpreadOperatorField bLRecordSpreadOpField =
                    (BLangRecordSpreadOperatorField) TreeBuilder.createRecordSpreadOperatorField();
            bLRecordSpreadOpField.expr = createExpression(spreadFieldNode.valueExpr());
            bLRecordSpreadOpField.pos = getPosition(spreadFieldNode);
            bLiteralNode.fields.add(bLRecordSpreadOpField);
        } else if (field.kind() == SyntaxKind.COMPUTED_NAME_FIELD) {
            // Computed-key field: `[keyExpr]: value`.
            ComputedNameFieldNode computedNameField = (ComputedNameFieldNode) field;
            BLangRecordKeyValueField bLRecordKeyValueField =
                    (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
            bLRecordKeyValueField.valueExpr = createExpression(computedNameField.valueExpr());
            bLRecordKeyValueField.key =
                    new BLangRecordLiteral.BLangRecordKey(createExpression(computedNameField.fieldNameExpr()));
            bLRecordKeyValueField.key.computedKey = true;
            bLiteralNode.fields.add(bLRecordKeyValueField);
        } else {
            SpecificFieldNode specificField = (SpecificFieldNode) field;
            io.ballerina.compiler.syntax.tree.ExpressionNode valueExpr = specificField.valueExpr().orElse(null);
            if (valueExpr == null) {
                // Shorthand field `{name}` — the value is the variable with the same name.
                BLangRecordLiteral.BLangRecordVarNameField fieldVar =
                        (BLangRecordLiteral.BLangRecordVarNameField) TreeBuilder.createRecordVarRefNameFieldNode();
                fieldVar.variableName = createIdentifier((Token) ((SpecificFieldNode) field).fieldName());
                fieldVar.pkgAlias = createIdentifier(null, "");
                fieldVar.pos = fieldVar.variableName.pos;
                fieldVar.readonly = specificField.readonlyKeyword().isPresent();
                bLiteralNode.fields.add(fieldVar);
            } else {
                // Regular `key: value` field.
                BLangRecordKeyValueField bLRecordKeyValueField =
                        (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
                bLRecordKeyValueField.pos = getPosition(specificField);
                bLRecordKeyValueField.readonly = specificField.readonlyKeyword().isPresent();
                bLRecordKeyValueField.valueExpr = createExpression(valueExpr);
                bLRecordKeyValueField.key =
                        new BLangRecordLiteral.BLangRecordKey(createExpression(specificField.fieldName()));
                bLRecordKeyValueField.key.computedKey = false;
                bLRecordKeyValueField.key.pos = getPosition(specificField.fieldName());
                bLiteralNode.fields.add(bLRecordKeyValueField);
            }
        }
    }
    bLiteralNode.pos = getPosition(mapConstruct);
    return bLiteralNode;
}
@Override
public BLangNode transform(ListConstructorExpressionNode listConstructorExprNode) {
    // Transform every member expression of the list constructor, in order.
    BLangListConstructorExpr listConstructorExpr = (BLangListConstructorExpr)
            TreeBuilder.createListConstructorExpressionNode();
    List<BLangExpression> memberExprs = new ArrayList<>();
    for (Node expr : listConstructorExprNode.expressions()) {
        memberExprs.add(createExpression(expr));
    }
    listConstructorExpr.exprs = memberExprs;
    listConstructorExpr.pos = getPosition(listConstructorExprNode);
    return listConstructorExpr;
}
@Override
public BLangNode transform(UnaryExpressionNode unaryExprNode) {
    // A unary operator applied directly to a numeric literal folds into the literal itself.
    if (unaryExprNode.expression().kind() == SyntaxKind.NUMERIC_LITERAL) {
        BLangNumericLiteral foldedLiteral = (BLangNumericLiteral) createSimpleLiteral(unaryExprNode);
        return foldedLiteral;
    }
    OperatorKind operator = OperatorKind.valueFrom(unaryExprNode.unaryOperator().text());
    BLangExpression operand = createExpression(unaryExprNode.expression());
    return createBLangUnaryExpr(getPosition(unaryExprNode), operator, operand);
}
@Override
public BLangNode transform(TypeofExpressionNode typeofExpressionNode) {
    // `typeof` is modeled as a unary operator applied to the inner expression.
    OperatorKind operator = OperatorKind.valueFrom(typeofExpressionNode.typeofKeyword().text());
    BLangExpression operand = createExpression(typeofExpressionNode.expression());
    return createBLangUnaryExpr(getPosition(typeofExpressionNode), operator, operand);
}
@Override
public BLangNode transform(BinaryExpressionNode binaryExprNode) {
    // Transform operands once (lhs first, matching the original evaluation order).
    Location pos = getPosition(binaryExprNode);
    BLangExpression lhsExpr = createExpression(binaryExprNode.lhsExpr());
    BLangExpression rhsExpr = createExpression(binaryExprNode.rhsExpr());

    // The elvis operator gets its own node type; every other operator uses BLangBinaryExpr.
    if (binaryExprNode.operator().kind() == SyntaxKind.ELVIS_TOKEN) {
        BLangElvisExpr elvisExpr = (BLangElvisExpr) TreeBuilder.createElvisExpressionNode();
        elvisExpr.pos = pos;
        elvisExpr.lhsExpr = lhsExpr;
        elvisExpr.rhsExpr = rhsExpr;
        return elvisExpr;
    }

    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = pos;
    binaryExpr.lhsExpr = lhsExpr;
    binaryExpr.rhsExpr = rhsExpr;
    binaryExpr.opKind = OperatorKind.valueFrom(binaryExprNode.operator().text());
    return binaryExpr;
}
@Override
public BLangNode transform(FieldAccessExpressionNode fieldAccessExprNode) {
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = fieldAccessExprNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // Namespace-prefixed field access (field name is a qualified reference).
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess)
                        TreeBuilder.createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        // Simple field access.
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field =
                createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }

    // A braced container expression is unwrapped before transforming.
    io.ballerina.compiler.syntax.tree.ExpressionNode containerExpr = fieldAccessExprNode.expression();
    if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
        bLFieldBasedAccess.expr = createExpression(((BracedExpressionNode) containerExpr).expression());
    } else {
        bLFieldBasedAccess.expr = createExpression(containerExpr);
    }

    bLFieldBasedAccess.pos = getPosition(fieldAccessExprNode);
    bLFieldBasedAccess.field.pos = getPosition(fieldAccessExprNode.fieldName());
    // Plain `.` access; optional access (`?.`) is handled by a separate transform.
    bLFieldBasedAccess.optionalFieldAccess = false;
    return bLFieldBasedAccess;
}
// Builds an optional field access `expr?.field`; a qualified field name
// (`expr?.prefix:field`) produces the NS-prefixed variant of the node.
// Fix: the original allocated a field-access node at the declaration and then
// unconditionally overwrote it in both branches — the initial allocation was dead.
@Override
public BLangNode transform(OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode) {
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = optionalFieldAccessExpressionNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) TreeBuilder
                        .createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field = createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    bLFieldBasedAccess.pos = getPosition(optionalFieldAccessExpressionNode);
    bLFieldBasedAccess.field.pos = getPosition(optionalFieldAccessExpressionNode.fieldName());
    bLFieldBasedAccess.expr = createExpression(optionalFieldAccessExpressionNode.expression());
    bLFieldBasedAccess.optionalFieldAccess = true;
    return bLFieldBasedAccess;
}
// A parenthesized expression contributes no node of its own; unwrap it.
@Override
public BLangNode transform(BracedExpressionNode brcExprOut) {
    io.ballerina.compiler.syntax.tree.ExpressionNode innerExpr = brcExprOut.expression();
    return createExpression(innerExpr);
}
// Transforms a function call; a call that is the operand of `start` becomes an
// asynchronous invocation.
@Override
public BLangNode transform(FunctionCallExpressionNode functionCallNode) {
    boolean async = isFunctionCallAsync(functionCallNode);
    Location position = getPosition(functionCallNode);
    return createBLangInvocation(functionCallNode.functionName(), functionCallNode.arguments(),
            position, async);
}
// Transforms `error(...)` / `error TypeRef(...)` constructor expressions,
// separating positional and named arguments into their own lists.
@Override
public BLangNode transform(ErrorConstructorExpressionNode errorConstructorExprNode) {
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    errorConstructorExpr.pos = getPosition(errorConstructorExprNode);
    if (errorConstructorExprNode.typeReference().isPresent()) {
        Node typeRef = errorConstructorExprNode.typeReference().get();
        errorConstructorExpr.errorTypeRef = (BLangUserDefinedType) createTypeNode(typeRef);
    }
    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    for (Node argNode : errorConstructorExprNode.arguments()) {
        switch (argNode.kind()) {
            case POSITIONAL_ARG:
                positionalArgs.add((BLangExpression) transform((PositionalArgumentNode) argNode));
                break;
            case NAMED_ARG:
                namedArgs.add((BLangNamedArgsExpression) transform((NamedArgumentNode) argNode));
                break;
            default:
                // Other argument kinds are ignored, matching the original behavior.
                break;
        }
    }
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;
    return errorConstructorExpr;
}
// Transforms a method call `expr.method(args)` into an invocation whose
// attached expression is the receiver.
// Fix: added the missing @Override annotation for consistency with every other
// transform(...) override in this class.
@Override
public BLangNode transform(MethodCallExpressionNode methodCallExprNode) {
    BLangInvocation bLInvocation = createBLangInvocation(methodCallExprNode.methodName(),
            methodCallExprNode.arguments(),
            getPosition(methodCallExprNode), false);
    bLInvocation.expr = createExpression(methodCallExprNode.expression());
    return bLInvocation;
}
// `new(...)` with the constructed type inferred from the surrounding context.
@Override
public BLangNode transform(ImplicitNewExpressionNode implicitNewExprNode) {
    BLangTypeInit initNode = createTypeInit(implicitNewExprNode);
    BLangInvocation initInvocation =
            createInvocation(implicitNewExprNode, implicitNewExprNode.newKeyword());
    initNode.initInvocation = initInvocation;
    initNode.argsExpr.addAll(initInvocation.argExprs);
    return initNode;
}
// `new T(...)` with an explicit type descriptor.
@Override
public BLangNode transform(ExplicitNewExpressionNode explicitNewExprNode) {
    BLangTypeInit initNode = createTypeInit(explicitNewExprNode);
    BLangInvocation initInvocation =
            createInvocation(explicitNewExprNode, explicitNewExprNode.newKeyword());
    initNode.initInvocation = initInvocation;
    initNode.argsExpr.addAll(initInvocation.argExprs);
    return initNode;
}
// A function call is asynchronous exactly when it is the operand of a `start` action.
private boolean isFunctionCallAsync(FunctionCallExpressionNode functionCallExpressionNode) {
    SyntaxKind parentKind = functionCallExpressionNode.parent().kind();
    return parentKind == SyntaxKind.START_ACTION;
}
// Creates the type-init node for a `new` expression. Only an explicit
// `new T(...)` carries a type descriptor; implicit `new` leaves it unset.
private BLangTypeInit createTypeInit(NewExpressionNode expression) {
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = getPosition(expression);
    if (expression.kind() != SyntaxKind.EXPLICIT_NEW_EXPRESSION) {
        return initNode;
    }
    Node typeDesc = ((ExplicitNewExpressionNode) expression).typeDescriptor();
    initNode.userDefinedType = createTypeNode(typeDesc);
    return initNode;
}
// Builds the synthetic invocation that models the constructor call of a `new`
// expression; the `new` keyword itself serves as the invocation name.
private BLangInvocation createInvocation(NewExpressionNode expression, Token newKeyword) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = getPosition(expression);
    populateArgsInvocation(expression, invocationNode);
    BLangNameReference nameReference = createBLangNameReference(newKeyword);
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    invocationNode.name = (BLangIdentifier) nameReference.name;
    return invocationNode;
}
// Appends each constructor argument of the `new` expression to the invocation;
// a null iterator (implicit `new` without an arg list) contributes nothing.
private void populateArgsInvocation(NewExpressionNode expression, BLangInvocation invocationNode) {
    Iterator<FunctionArgumentNode> argumentsIter = getArgumentNodesIterator(expression);
    if (argumentsIter == null) {
        return;
    }
    while (argumentsIter.hasNext()) {
        invocationNode.argExprs.add(createExpression(argumentsIter.next()));
    }
}
// Returns an iterator over the constructor arguments of a `new` expression, or
// null when an implicit `new` has no parenthesized argument list at all.
private Iterator<FunctionArgumentNode> getArgumentNodesIterator(NewExpressionNode expression) {
    if (expression.kind() == SyntaxKind.IMPLICIT_NEW_EXPRESSION) {
        Optional<ParenthesizedArgList> argsList =
                ((ImplicitNewExpressionNode) expression).parenthesizedArgList();
        return argsList.map(argList -> argList.arguments().iterator()).orElse(null);
    }
    ParenthesizedArgList argList =
            (ParenthesizedArgList) ((ExplicitNewExpressionNode) expression).parenthesizedArgList();
    return argList.arguments().iterator();
}
// Transforms `container[k1, k2, ...]` index access. A single key maps directly
// to the index expression; multiple keys (table multi-key access) are wrapped
// in a multi-key expression node. Braced and xml-step containers get special
// handling below.
@Override
public BLangNode transform(IndexedExpressionNode indexedExpressionNode) {
BLangIndexBasedAccess indexBasedAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
indexBasedAccess.pos = getPosition(indexedExpressionNode);
SeparatedNodeList<io.ballerina.compiler.syntax.tree.ExpressionNode> keys =
indexedExpressionNode.keyExpression();
if (keys.size() == 1) {
indexBasedAccess.indexExpr = createExpression(indexedExpressionNode.keyExpression().get(0));
} else {
// Multi-key position spans from the first key to the last.
BLangTableMultiKeyExpr multiKeyExpr =
(BLangTableMultiKeyExpr) TreeBuilder.createTableMultiKeyExpressionNode();
multiKeyExpr.pos = getPosition(keys.get(0), keys.get(keys.size() - 1));
List<BLangExpression> multiKeyIndexExprs = new ArrayList<>();
for (io.ballerina.compiler.syntax.tree.ExpressionNode keyExpr : keys) {
multiKeyIndexExprs.add(createExpression(keyExpr));
}
multiKeyExpr.multiKeyIndexExprs = multiKeyIndexExprs;
indexBasedAccess.indexExpr = multiKeyExpr;
}
Node containerExpr = indexedExpressionNode.containerExpression();
BLangExpression expression = createExpression(containerExpr);
if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
// `(expr)[i]`: index the inner expression but keep a group node around the result.
indexBasedAccess.expr = ((BLangGroupExpr) expression).expression;
BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
group.expression = indexBasedAccess;
group.pos = getPosition(indexedExpressionNode);
return group;
} else if (containerExpr.kind() == SyntaxKind.XML_STEP_EXPRESSION) {
// `xmlStep[i]`: the index becomes the step's child index; no new access node.
((BLangXMLNavigationAccess) expression).childIndex = indexBasedAccess.indexExpr;
return expression;
}
indexBasedAccess.expr = expression;
return indexBasedAccess;
}
// Transforms a type-cast expression `<@annot T> expr` into a type-conversion node.
// Fix: the original null-checked typeCastParamNode for the type access but then
// unconditionally dereferenced it for annotations(), which would NPE exactly when
// the null check mattered. All param access is now inside the guard.
@Override
public BLangTypeConversionExpr transform(TypeCastExpressionNode typeCastExpressionNode) {
    BLangTypeConversionExpr typeConversionNode = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    typeConversionNode.pos = getPosition(typeCastExpressionNode);
    TypeCastParamNode typeCastParamNode = typeCastExpressionNode.typeCastParam();
    if (typeCastParamNode != null) {
        if (typeCastParamNode.type().isPresent()) {
            typeConversionNode.typeNode = createTypeNode(typeCastParamNode.type().get());
        }
        typeConversionNode.annAttachments = applyAll(typeCastParamNode.annotations());
    }
    typeConversionNode.expr = createExpression(typeCastExpressionNode.expression());
    return typeConversionNode;
}
// Only tokens that stand for literal text inside templates are expected here;
// anything else indicates a transformer bug.
@Override
public BLangNode transform(Token token) {
    SyntaxKind kind = token.kind();
    if (kind == SyntaxKind.XML_TEXT_CONTENT
            || kind == SyntaxKind.TEMPLATE_STRING
            || kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return createSimpleLiteral(token);
    }
    throw new RuntimeException("Syntax kind is not supported: " + kind);
}
// `${expr}` inside a template — only the inner expression matters.
@Override
public BLangNode transform(InterpolationNode interpolationNode) {
    io.ballerina.compiler.syntax.tree.ExpressionNode innerExpr = interpolationNode.expression();
    return createExpression(innerExpr);
}
// Dispatches a template expression (xml/string/raw) to the matching literal builder.
@Override
public BLangNode transform(TemplateExpressionNode expressionNode) {
    SyntaxKind kind = expressionNode.kind();
    if (kind == SyntaxKind.XML_TEMPLATE_EXPRESSION) {
        return createXmlTemplateLiteral(expressionNode);
    }
    if (kind == SyntaxKind.STRING_TEMPLATE_EXPRESSION) {
        return createStringTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
    }
    if (kind == SyntaxKind.RAW_TEMPLATE_EXPRESSION) {
        return createRawTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
    }
    throw new RuntimeException("Syntax kind is not supported: " + kind);
}
// Transforms a table constructor `table key(...) [ {...}, ... ]`.
// Fix: the original checked keySpecifier().isPresent() and then still fetched the
// value via orElse(null); use ifPresent so absence can never be dereferenced.
@Override
public BLangNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {
    BLangTableConstructorExpr tableConstructorExpr =
            (BLangTableConstructorExpr) TreeBuilder.createTableConstructorExpressionNode();
    tableConstructorExpr.pos = getPosition(tableConstructorExpressionNode);
    // Each row is a mapping constructor that becomes a record literal.
    for (Node row : tableConstructorExpressionNode.rows()) {
        tableConstructorExpr.addRecordLiteral((BLangRecordLiteral) row.apply(this));
    }
    tableConstructorExpressionNode.keySpecifier().ifPresent(keySpecifier ->
            tableConstructorExpr.tableKeySpecifier = (BLangTableKeySpecifier) keySpecifier.apply(this));
    return tableConstructorExpr;
}
// `trap expr` — wraps the operand in a trap expression node.
@Override
public BLangNode transform(TrapExpressionNode trapExpressionNode) {
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.expr = createExpression(trapExpressionNode.expression());
    trapExpr.pos = getPosition(trapExpressionNode);
    return trapExpr;
}
// Transforms a worker receive action (`<- worker`). Multiple receive
// (`<- {w1, w2}`) is not yet supported: an error diagnostic is logged and a
// synthetic missing identifier token stands in as the worker name so that
// transformation can continue.
@Override
public BLangNode transform(ReceiveActionNode receiveActionNode) {
BLangWorkerReceive workerReceiveExpr = (BLangWorkerReceive) TreeBuilder.createWorkerReceiveNode();
Node receiveWorkers = receiveActionNode.receiveWorkers();
Token workerName;
if (receiveWorkers.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
workerName = ((SimpleNameReferenceNode) receiveWorkers).name();
} else {
// Multiple-receive form: report and recover with a missing-identifier token.
Location receiveFieldsPos = getPosition(receiveWorkers);
dlog.error(receiveFieldsPos, DiagnosticErrorCode.MULTIPLE_RECEIVE_ACTION_NOT_YET_SUPPORTED);
workerName = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
}
workerReceiveExpr.setWorkerName(createIdentifier(workerName));
workerReceiveExpr.pos = getPosition(receiveActionNode);
return workerReceiveExpr;
}
// `expr ->> worker` — synchronous send to a named peer worker.
@Override
public BLangNode transform(SyncSendActionNode syncSendActionNode) {
    BLangWorkerSyncSendExpr syncSend = TreeBuilder.createWorkerSendSyncExprNode();
    syncSend.setWorkerName(createIdentifier(syncSendActionNode.peerWorker().name()));
    syncSend.expr = createExpression(syncSendActionNode.expression());
    syncSend.pos = getPosition(syncSendActionNode);
    return syncSend;
}
// Transforms an arrow function `(a, b) => expr` / `a => expr` into a BLang
// arrow-function node with a fresh anonymous name and an expression body.
// Fix: the parameter-construction code was duplicated verbatim in both branches;
// it is now factored into a single private helper.
@Override
public BLangNode transform(ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) {
    BLangArrowFunction arrowFunction = (BLangArrowFunction) TreeBuilder.createArrowFunctionNode();
    arrowFunction.pos = getPosition(implicitAnonymousFunctionExpressionNode);
    arrowFunction.functionName = createIdentifier(arrowFunction.pos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    // Params are either a parenthesized list `(a, b) => ...` or a single bare name `a => ...`.
    Node param = implicitAnonymousFunctionExpressionNode.params();
    if (param.kind() == SyntaxKind.INFER_PARAM_LIST) {
        ImplicitAnonymousFunctionParameters paramsNode = (ImplicitAnonymousFunctionParameters) param;
        for (SimpleNameReferenceNode child : paramsNode.parameters()) {
            arrowFunction.params.add(createArrowFunctionParameter(child));
        }
    } else {
        arrowFunction.params.add(createArrowFunctionParameter(param));
    }
    arrowFunction.body = new BLangExprFunctionBody();
    arrowFunction.body.expr = createExpression(implicitAnonymousFunctionExpressionNode.expression());
    arrowFunction.body.pos = arrowFunction.body.expr.pos;
    return arrowFunction;
}

// Converts one inferred arrow-function parameter name into an untyped simple variable.
private BLangSimpleVariable createArrowFunctionParameter(Node paramName) {
    BLangUserDefinedType userDefinedType = (BLangUserDefinedType) paramName.apply(this);
    BLangSimpleVariable parameter = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    parameter.name = userDefinedType.typeName;
    parameter.pos = getPosition(paramName);
    return parameter;
}
// `commit` carries no operands; only its position is recorded.
@Override
public BLangNode transform(CommitActionNode commitActionNode) {
    BLangCommitExpr commitExpr = TreeBuilder.createCommitExpressionNode();
    commitExpr.pos = getPosition(commitActionNode);
    return commitExpr;
}
// `flush [worker]` — a missing worker name means flush to all workers, and the
// identifier is simply left unset.
@Override
public BLangNode transform(FlushActionNode flushActionNode) {
    BLangWorkerFlushExpr workerFlushExpr = TreeBuilder.createWorkerFlushExpressionNode();
    flushActionNode.peerWorker().ifPresent(peerWorker ->
            workerFlushExpr.workerIdentifier =
                    createIdentifier(((SimpleNameReferenceNode) peerWorker).name()));
    workerFlushExpr.pos = getPosition(flushActionNode);
    return workerFlushExpr;
}
// `let x = e1, y = e2 in body` — collects each let-variable declaration and the
// trailing body expression.
@Override
public BLangNode transform(LetExpressionNode letExpressionNode) {
    BLangLetExpression letExpr = (BLangLetExpression) TreeBuilder.createLetExpressionNode();
    letExpr.pos = getPosition(letExpressionNode);
    letExpr.expr = createExpression(letExpressionNode.expression());
    List<BLangLetVariable> letVars = new ArrayList<>();
    for (LetVariableDeclarationNode letVarDecl : letExpressionNode.letVarDeclarations()) {
        letVars.add(createLetVariable(letVarDecl));
    }
    letExpr.letVarDeclarations = letVars;
    return letExpr;
}
// Builds one let-variable declaration; let variables are implicitly final.
public BLangLetVariable createLetVariable(LetVariableDeclarationNode letVarDecl) {
    BLangLetVariable letVar = TreeBuilder.createLetVariableNode();
    VariableDefinitionNode varDefNode = createBLangVarDef(getPosition(letVarDecl),
            letVarDecl.typedBindingPattern(), Optional.of(letVarDecl.expression()), Optional.empty());
    varDefNode.getVariable().addFlag(Flag.FINAL);
    for (BLangNode annot : applyAll(letVarDecl.annotations())) {
        varDefNode.getVariable().addAnnotationAttachment((AnnotationAttachmentNode) annot);
    }
    letVar.definitionNode = varDefNode;
    return letVar;
}
// `{a, b: p, ...rest}` on an assignment LHS — named fields become key/value
// references and a rest pattern is captured separately.
@Override
public BLangNode transform(MappingBindingPatternNode mappingBindingPatternNode) {
    BLangRecordVarRef recordVarRef = (BLangRecordVarRef) TreeBuilder.createRecordVariableReferenceNode();
    recordVarRef.pos = getPosition(mappingBindingPatternNode);
    List<BLangRecordVarRefKeyValue> fieldRefs = new ArrayList<>();
    for (BindingPatternNode fieldPattern : mappingBindingPatternNode.fieldBindingPatterns()) {
        if (fieldPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            recordVarRef.restParam = createExpression(fieldPattern);
        } else {
            fieldRefs.add(createRecordVarKeyValue(fieldPattern));
        }
    }
    recordVarRef.recordRefFields = fieldRefs;
    return recordVarRef;
}
// Maps one field binding pattern to a (field name, variable reference) pair.
private BLangRecordVarRefKeyValue createRecordVarKeyValue(BindingPatternNode expr) {
    BLangRecordVarRefKeyValue keyValue = new BLangRecordVarRefKeyValue();
    if (expr instanceof FieldBindingPatternFullNode) {
        // `{name: pattern}` — the value reference comes from the nested pattern.
        FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) expr;
        keyValue.variableName = createIdentifier(fullNode.variableName().name());
        keyValue.variableReference = createExpression(fullNode.bindingPattern());
        return keyValue;
    }
    // `{name}` shorthand — the field name doubles as a simple variable reference.
    FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) expr;
    keyValue.variableName = createIdentifier(varnameNode.variableName().name());
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(varnameNode.variableName());
    varRef.variableName = createIdentifier(varnameNode.variableName().name());
    varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    keyValue.variableReference = varRef;
    return keyValue;
}
// `[a, b, ...rest]` on an assignment LHS — member patterns become references and
// a rest pattern is captured separately.
@Override
public BLangNode transform(ListBindingPatternNode listBindingPatternNode) {
    BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) TreeBuilder.createTupleVariableReferenceNode();
    tupleVarRef.pos = getPosition(listBindingPatternNode);
    List<BLangExpression> memberRefs = new ArrayList<>();
    for (BindingPatternNode memberPattern : listBindingPatternNode.bindingPatterns()) {
        if (memberPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            tupleVarRef.restParam = createExpression(memberPattern);
        } else {
            memberRefs.add(createExpression(memberPattern));
        }
    }
    tupleVarRef.expressions = memberRefs;
    return tupleVarRef;
}
// `...name` simply becomes a reference to the captured variable.
@Override
public BLangNode transform(RestBindingPatternNode restBindingPatternNode) {
    SimpleNameReferenceNode variableName = restBindingPatternNode.variableName();
    return createExpression(variableName);
}
// A capture binding pattern is just a reference to the captured name.
@Override
public BLangNode transform(CaptureBindingPatternNode captureBindingPatternNode) {
    Token variableName = captureBindingPatternNode.variableName();
    return createExpression(variableName);
}
// `_` binding pattern: modelled as a reference to the reserved ignore variable.
@Override
public BLangNode transform(WildcardBindingPatternNode wildcardBindingPatternNode) {
BLangSimpleVarRef ignoreVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
BLangIdentifier ignore = (BLangIdentifier) TreeBuilder.createIdentifierNode();
ignore.value = Names.IGNORE.value;
ignoreVarRef.variableName = ignore;
// NOTE(review): only the identifier's pos is set here, not ignoreVarRef.pos —
// presumably downstream code never reads the var-ref position for `_`; confirm.
ignore.pos = getPosition(wildcardBindingPatternNode);
return ignoreVarRef;
}
// Builds an error variable reference from `error [TypeRef] (msg, cause, k = v, ...rest)`.
// Positional arg 0 is the message; a subsequent capture/wildcard or an error
// pattern is the cause; named args form the detail mapping; anything else is the
// rest variable.
@Override
public BLangNode transform(ErrorBindingPatternNode errorBindingPatternNode) {
BLangErrorVarRef errorVarRef = (BLangErrorVarRef) TreeBuilder.createErrorVariableReferenceNode();
errorVarRef.pos = getPosition(errorBindingPatternNode);
Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
if (errorTypeRef.isPresent()) {
errorVarRef.typeNode = createTypeNode(errorTypeRef.get());
}
SeparatedNodeList<BindingPatternNode> argListBindingPatterns = errorBindingPatternNode.argListBindingPatterns();
int numberOfArgs = argListBindingPatterns.size();
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
for (int position = 0; position < numberOfArgs; position++) {
BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
switch (bindingPatternNode.kind()) {
case CAPTURE_BINDING_PATTERN:
case WILDCARD_BINDING_PATTERN:
if (position == 0) {
errorVarRef.message = (BLangVariableReference) createExpression(bindingPatternNode);
break;
}
// Intentional fall-through: a capture/wildcard after position 0 is the cause.
case ERROR_BINDING_PATTERN:
errorVarRef.cause = (BLangVariableReference) createExpression(bindingPatternNode);
break;
case NAMED_ARG_BINDING_PATTERN:
namedArgs.add((BLangNamedArgsExpression) bindingPatternNode.apply(this));
break;
default:
// Rest binding pattern (or any remaining kind) becomes the rest variable.
errorVarRef.restVar = (BLangVariableReference) createExpression(bindingPatternNode);
}
}
errorVarRef.detail = namedArgs;
return errorVarRef;
}
// `name = pattern` inside an error binding pattern becomes a named-arg expression.
@Override
public BLangNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) {
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.pos = getPosition(namedArgBindingPatternNode);
    namedArg.name = createIdentifier(namedArgBindingPatternNode.argName());
    namedArg.expr = createExpression(namedArgBindingPatternNode.bindingPattern());
    return namedArg;
}
// Transforms a return statement. A bare `return;` is modelled as `return ();`
// with an explicit nil literal.
@Override
public BLangNode transform(ReturnStatementNode returnStmtNode) {
    BLangReturn bLReturn = (BLangReturn) TreeBuilder.createReturnNode();
    bLReturn.pos = getPosition(returnStmtNode);
    if (returnStmtNode.expression().isPresent()) {
        bLReturn.expr = createExpression(returnStmtNode.expression().get());
        return bLReturn;
    }
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.pos = getPosition(returnStmtNode);
    nilLiteral.value = Names.NIL_VALUE;
    nilLiteral.type = symTable.nilType;
    bLReturn.expr = nilLiteral;
    return bLReturn;
}
// `panic expr` — wraps the operand in a panic node.
@Override
public BLangNode transform(PanicStatementNode panicStmtNode) {
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = getPosition(panicStmtNode);
    panicNode.expr = createExpression(panicStmtNode.expression());
    return panicNode;
}
// `continue` carries no operands; only its position is recorded.
@Override
public BLangNode transform(ContinueStatementNode continueStmtNode) {
    BLangContinue continueNode = (BLangContinue) TreeBuilder.createContinueNode();
    continueNode.pos = getPosition(continueStmtNode);
    return continueNode;
}
// Transforms a module-level `listener` declaration into a listener variable.
// The visibility qualifier is optional; the builder accepts null for a
// module-private listener.
@Override
public BLangNode transform(ListenerDeclarationNode listenerDeclarationNode) {
    Token visibilityQualifier = listenerDeclarationNode.visibilityQualifier().orElse(null);
    BLangSimpleVariable listenerVar = new SimpleVarBuilder()
            .with(listenerDeclarationNode.variableName())
            .setTypeByNode(listenerDeclarationNode.typeDescriptor().orElse(null))
            .setExpressionByNode(listenerDeclarationNode.initializer())
            .setVisibility(visibilityQualifier)
            .isListenerVar()
            .build();
    listenerVar.pos = getPositionWithoutMetadata(listenerDeclarationNode);
    listenerVar.name.pos = getPosition(listenerDeclarationNode.variableName());
    listenerVar.annAttachments = applyAll(getAnnotations(listenerDeclarationNode.metadata()));
    return listenerVar;
}
// `break` carries no operands; only its position is recorded.
@Override
public BLangNode transform(BreakStatementNode breakStmtNode) {
    BLangBreak breakNode = (BLangBreak) TreeBuilder.createBreakNode();
    breakNode.pos = getPosition(breakStmtNode);
    return breakNode;
}
// Dispatches an assignment statement: a binding pattern on the LHS becomes the
// corresponding destructuring statement; anything else is a plain assignment.
@Override
public BLangNode transform(AssignmentStatementNode assignmentStmtNode) {
SyntaxKind lhsKind = assignmentStmtNode.varRef().kind();
switch (lhsKind) {
case LIST_BINDING_PATTERN:
return createTupleDestructureStatement(assignmentStmtNode);
case MAPPING_BINDING_PATTERN:
return createRecordDestructureStatement(assignmentStmtNode);
case ERROR_BINDING_PATTERN:
return createErrorDestructureStatement(assignmentStmtNode);
default:
break;
}
BLangAssignment bLAssignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
BLangExpression lhsExpr = createExpression(assignmentStmtNode.varRef());
// Reject invocations appearing as (or inside) the assignment target.
validateLvexpr(lhsExpr, DiagnosticErrorCode.INVALID_INVOCATION_LVALUE_ASSIGNMENT);
bLAssignment.setExpression(createExpression(assignmentStmtNode.expression()));
bLAssignment.pos = getPosition(assignmentStmtNode);
bLAssignment.varRef = lhsExpr;
return bLAssignment;
}
// Builds `[a, b] = expr` tuple destructuring from the assignment statement.
public BLangNode createTupleDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangTupleDestructure destructureStmt =
            (BLangTupleDestructure) TreeBuilder.createTupleDestructureStatementNode();
    destructureStmt.varRef = (BLangTupleVarRef) createExpression(assignmentStmtNode.varRef());
    destructureStmt.setExpression(createExpression(assignmentStmtNode.expression()));
    destructureStmt.pos = getPosition(assignmentStmtNode);
    return destructureStmt;
}
// Builds `{a, b} = expr` record destructuring from the assignment statement.
public BLangNode createRecordDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangRecordDestructure destructureStmt =
            (BLangRecordDestructure) TreeBuilder.createRecordDestructureStatementNode();
    destructureStmt.varRef = (BLangRecordVarRef) createExpression(assignmentStmtNode.varRef());
    destructureStmt.setExpression(createExpression(assignmentStmtNode.expression()));
    destructureStmt.pos = getPosition(assignmentStmtNode);
    return destructureStmt;
}
// Builds `error(msg, ...) = expr` error destructuring from the assignment statement.
public BLangNode createErrorDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangErrorDestructure destructureStmt =
            (BLangErrorDestructure) TreeBuilder.createErrorDestructureStatementNode();
    destructureStmt.varRef = (BLangErrorVarRef) createExpression(assignmentStmtNode.varRef());
    destructureStmt.setExpression(createExpression(assignmentStmtNode.expression()));
    destructureStmt.pos = getPosition(assignmentStmtNode);
    return destructureStmt;
}
// Transforms `lhs op= rhs` into a compound assignment carrying the binary operator.
@Override
public BLangNode transform(CompoundAssignmentStatementNode compoundAssignmentStmtNode) {
    BLangCompoundAssignment compoundAssignment =
            (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    compoundAssignment.setExpression(createExpression(compoundAssignmentStmtNode.rhsExpression()));
    compoundAssignment.setVariable(
            (VariableReferenceNode) createExpression(compoundAssignmentStmtNode.lhsExpression()));
    compoundAssignment.pos = getPosition(compoundAssignmentStmtNode);
    compoundAssignment.opKind = OperatorKind.valueFrom(compoundAssignmentStmtNode.binaryOperator().text());
    return compoundAssignment;
}
// Reports invocations used as assignment targets. Recurses through field and
// index accesses so chains like `f().x = v` are caught as well.
private void validateLvexpr(ExpressionNode lExprNode, DiagnosticCode errorCode) {
    NodeKind exprKind = lExprNode.getKind();
    if (exprKind == NodeKind.INVOCATION) {
        dlog.error(((BLangInvocation) lExprNode).pos, errorCode);
    }
    if (exprKind == NodeKind.FIELD_BASED_ACCESS_EXPR || exprKind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        validateLvexpr(((BLangAccessExpression) lExprNode).expr, errorCode);
    }
}
// Transforms a `do { ... } [on fail ...]` statement.
@Override
public BLangNode transform(DoStatementNode doStatementNode) {
    BLangDo doNode = (BLangDo) TreeBuilder.createDoNode();
    doNode.pos = getPosition(doStatementNode);
    BLangBlockStmt body = (BLangBlockStmt) doStatementNode.blockStatement().apply(this);
    body.pos = getPosition(doStatementNode.blockStatement());
    doNode.setBody(body);
    // An attached `on fail` clause is transformed and hung off the do node.
    doStatementNode.onFailClause().ifPresent(onFailClause -> doNode.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClause.apply(this)));
    return doNode;
}
// `fail expr` — wraps the operand in a fail node.
@Override
public BLangNode transform(FailStatementNode failStatementNode) {
    BLangFail failNode = (BLangFail) TreeBuilder.createFailNode();
    failNode.pos = getPosition(failStatementNode);
    failNode.expr = createExpression(failStatementNode.expression());
    return failNode;
}
// Transforms a `while cond { ... } [on fail ...]` statement.
@Override
public BLangNode transform(WhileStatementNode whileStmtNode) {
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.setCondition(createExpression(whileStmtNode.condition()));
    whileNode.pos = getPosition(whileStmtNode);
    BLangBlockStmt whileBody = (BLangBlockStmt) whileStmtNode.whileBody().apply(this);
    whileBody.pos = getPosition(whileStmtNode.whileBody());
    whileNode.setBody(whileBody);
    whileStmtNode.onFailClause().ifPresent(onFailClause -> whileNode.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClause.apply(this)));
    return whileNode;
}
// Transforms `if cond { ... } [else ...]`; an `else`/`else if` body hangs off
// the if node as a nested statement.
@Override
public BLangNode transform(IfElseStatementNode ifElseStmtNode) {
    BLangIf ifNode = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifNode.pos = getPosition(ifElseStmtNode);
    ifNode.setCondition(createExpression(ifElseStmtNode.condition()));
    ifNode.setBody((BLangBlockStmt) ifElseStmtNode.ifBody().apply(this));
    ifElseStmtNode.elseBody().ifPresent(elseBody -> ifNode.setElseStatement(
            (org.ballerinalang.model.tree.statements.StatementNode)
                    ((ElseBlockNode) elseBody).elseBody().apply(this)));
    return ifNode;
}
// Transforms a block of statements. `isInLocalContext` is toggled around the
// statement transformation so nested declarations know they are statement-level
// rather than module-level.
@Override
public BLangNode transform(BlockStatementNode blockStatement) {
BLangBlockStmt bLBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
this.isInLocalContext = true;
bLBlockStmt.stmts = generateBLangStatements(blockStatement.statements());
this.isInLocalContext = false;
bLBlockStmt.pos = getPosition(blockStatement);
// For if/else bodies, widen the position leftwards to cover the header tokens.
SyntaxKind parent = blockStatement.parent().kind();
if (parent == SyntaxKind.IF_ELSE_STATEMENT || parent == SyntaxKind.ELSE_BLOCK) {
bLBlockStmt.pos = expandLeft(bLBlockStmt.pos, getPosition(blockStatement.parent()));
}
return bLBlockStmt;
}
// `rollback [expr]` — the rollback expression is optional.
@Override
public BLangNode transform(RollbackStatementNode rollbackStatementNode) {
    BLangRollback rollbackStmt = (BLangRollback) TreeBuilder.createRollbackNode();
    rollbackStmt.pos = getPosition(rollbackStatementNode);
    rollbackStatementNode.expression().ifPresent(expr -> rollbackStmt.expr = createExpression(expr));
    return rollbackStmt;
}
// Transforms a `lock { ... } [on fail ...]` statement.
@Override
public BLangNode transform(LockStatementNode lockStatementNode) {
    BLangLock lockNode = (BLangLock) TreeBuilder.createLockNode();
    lockNode.pos = getPosition(lockStatementNode);
    BLangBlockStmt lockBody = (BLangBlockStmt) lockStatementNode.blockStatement().apply(this);
    lockBody.pos = getPosition(lockStatementNode.blockStatement());
    lockNode.setBody(lockBody);
    lockStatementNode.onFailClause().ifPresent(onFailClause -> lockNode.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClause.apply(this)));
    return lockNode;
}
// Transforms `typedesc` / `typedesc<T>`. With a type parameter the built-in ref
// type is wrapped in a constrained type; otherwise it is returned directly.
// Fix: the sibling xml-type transform sets `refType.pos`, but this one never did,
// so a bare `typedesc` produced a node with a null position.
@Override
public BLangNode transform(TypedescTypeDescriptorNode typedescTypeDescriptorNode) {
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TypeKind.TYPEDESC;
    refType.pos = getPosition(typedescTypeDescriptorNode);
    Optional<TypeParameterNode> node = typedescTypeDescriptorNode.typedescTypeParamsNode();
    if (node.isPresent()) {
        BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
        constrainedType.type = refType;
        constrainedType.constraint = createTypeNode(node.get().typeNode());
        constrainedType.pos = getPosition(typedescTypeDescriptorNode);
        return constrainedType;
    }
    return refType;
}
// Delegates a local variable declaration to the shared var-def builder;
// both the initializer and the `final` keyword are optional.
@Override
public BLangNode transform(VariableDeclarationNode varDeclaration) {
    VariableDefinitionNode varDef = createBLangVarDef(getPosition(varDeclaration),
            varDeclaration.typedBindingPattern(), varDeclaration.initializer(),
            varDeclaration.finalKeyword());
    return (BLangNode) varDef;
}
// Transforms `xml` / `xml<T>`. With a type parameter the built-in ref type is
// wrapped in a constrained type; otherwise it is returned directly.
// Fix: added the missing @Override annotation for consistency with every other
// transform(...) override in this class.
@Override
public BLangNode transform(XmlTypeDescriptorNode xmlTypeDescriptorNode) {
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TypeKind.XML;
    refType.pos = getPosition(xmlTypeDescriptorNode);
    Optional<TypeParameterNode> node = xmlTypeDescriptorNode.xmlTypeParamsNode();
    if (node.isPresent()) {
        BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
        constrainedType.type = refType;
        constrainedType.constraint = createTypeNode(node.get().typeNode());
        constrainedType.pos = getPosition(xmlTypeDescriptorNode);
        return constrainedType;
    }
    return refType;
}
/**
 * Creates the statement-level variable definition node for a typed binding pattern,
 * dispatching on the binding-pattern kind (capture/wildcard, mapping, list, error).
 *
 * @param location            position recorded on simple variable definitions
 * @param typedBindingPattern the `T pattern` part of the declaration
 * @param initializer         optional initializer expression
 * @param finalKeyword        optional `final` keyword token
 * @return the variable definition node for the matching pattern kind
 */
private VariableDefinitionNode createBLangVarDef(Location location,
                                 TypedBindingPatternNode typedBindingPattern,
                                 Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
                                 Optional<Token> finalKeyword) {
    BindingPatternNode bindingPattern = typedBindingPattern.bindingPattern();
    BLangVariable variable = getBLangVariableNode(bindingPattern);
    // The qualifier list currently only ever carries `final`.
    List<Token> qualifiers = new ArrayList<>();

    if (finalKeyword.isPresent()) {
        qualifiers.add(finalKeyword.get());
    }
    NodeList<Token> qualifierList = NodeFactory.createNodeList(qualifiers);

    switch (bindingPattern.kind()) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // Simple variables are initialized inline here (not via initializeBLangVariable),
            // so the FINAL flag is added directly.
            BLangSimpleVariableDef bLVarDef =
                    (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
            bLVarDef.pos = variable.pos = location;
            BLangExpression expr = initializer.isPresent() ? createExpression(initializer.get()) : null;
            variable.setInitialExpression(expr);
            bLVarDef.setVariable(variable);
            if (finalKeyword.isPresent()) {
                variable.flagSet.add(Flag.FINAL);
            }

            // `var` declarations get no explicit type node.
            TypeDescriptorNode typeDesc = typedBindingPattern.typeDescriptor();
            variable.isDeclaredWithVar = isDeclaredWithVar(typeDesc);
            if (!variable.isDeclaredWithVar) {
                variable.setTypeNode(createTypeNode(typeDesc));
            }
            return bLVarDef;
        case MAPPING_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createRecordVariableDef(variable);
        case LIST_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createTupleVariableDef(variable);
        case ERROR_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createErrorVariableDef(variable);
        default:
            // Should be unreachable for a well-formed tree.
            throw new RuntimeException(
                    "Syntax kind is not a valid binding pattern " + typedBindingPattern.bindingPattern().kind());
    }
}
/**
 * Applies qualifier flags, the declared type, and the initializer to a binding-pattern
 * variable (mapping/list/error patterns).
 *
 * @param var         the variable being initialized (mutated in place)
 * @param type        declared type descriptor (may be `var`)
 * @param initializer optional initializer expression
 * @param qualifiers  qualifier tokens (`final`, `configurable`, `isolated`)
 */
private void initializeBLangVariable(BLangVariable var, TypeDescriptorNode type,
                                     Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
                                     NodeList<Token> qualifiers) {
    for (Token qualifier : qualifiers) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.FINAL_KEYWORD) {
            markVariableWithFlag(var, Flag.FINAL);
        } else if (kind == SyntaxKind.CONFIGURABLE_KEYWORD) {
            var.flagSet.add(Flag.CONFIGURABLE);
            // Fix: guard before dereferencing — the previous code called initializer.get()
            // unconditionally, throwing NoSuchElementException if a configurable declaration
            // reached here without an initializer.
            if (initializer.isPresent() && initializer.get().kind() == SyntaxKind.REQUIRED_EXPRESSION) {
                // `configurable T x = ?;` — value must be supplied at runtime.
                var.flagSet.add(Flag.REQUIRED);
                initializer = Optional.empty();
            }
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            var.flagSet.add(Flag.ISOLATED);
        }
    }

    // `var` declarations get no explicit type node.
    var.isDeclaredWithVar = isDeclaredWithVar(type);
    if (!var.isDeclaredWithVar) {
        var.setTypeNode(createTypeNode(type));
    }
    if (initializer.isPresent()) {
        var.setInitialExpression(createExpression(initializer.get()));
    }
}
// Wraps a record-binding variable in its statement-level definition node.
private BLangRecordVariableDef createRecordVariableDef(BLangVariable var) {
    BLangRecordVariableDef recordVarDef =
            (BLangRecordVariableDef) TreeBuilder.createRecordVariableDefinitionNode();
    recordVarDef.setVariable(var);
    recordVarDef.pos = var.pos;
    return recordVarDef;
}
// Wraps a tuple-binding variable in its statement-level definition node.
private BLangTupleVariableDef createTupleVariableDef(BLangVariable tupleVar) {
    BLangTupleVariableDef tupleVarDef = (BLangTupleVariableDef) TreeBuilder.createTupleVariableDefinitionNode();
    tupleVarDef.setVariable(tupleVar);
    // Position intentionally derived from a null node, matching existing behavior.
    tupleVarDef.pos = getPosition(null);
    return tupleVarDef;
}
// Wraps an error-binding variable in its statement-level definition node.
private BLangErrorVariableDef createErrorVariableDef(BLangVariable errorVar) {
    BLangErrorVariableDef errorVarDef = (BLangErrorVariableDef) TreeBuilder.createErrorVariableDefinitionNode();
    errorVarDef.setVariable(errorVar);
    // Position intentionally derived from a null node, matching existing behavior.
    errorVarDef.pos = getPosition(null);
    return errorVarDef;
}
@Override
public BLangNode transform(ExpressionStatementNode expressionStatement) {
    // `worker -> x;` at statement level is lowered directly to the send action node.
    if (expressionStatement.expression().kind() == SyntaxKind.ASYNC_SEND_ACTION) {
        return expressionStatement.expression().apply(this);
    }

    // Every other expression becomes an ordinary expression statement.
    BLangExpressionStmt exprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode();
    exprStmt.expr = createExpression(expressionStatement.expression());
    exprStmt.pos = getPosition(expressionStatement);
    return exprStmt;
}
@Override
public BLangNode transform(AsyncSendActionNode asyncSendActionNode) {
    // `worker -> expr`: record the peer worker name and the sent expression.
    BLangWorkerSend workerSend = (BLangWorkerSend) TreeBuilder.createWorkerSendNode();
    workerSend.pos = getPosition(asyncSendActionNode);
    workerSend.expr = createExpression(asyncSendActionNode.expression());
    workerSend.setWorkerName(createIdentifier(getPosition(asyncSendActionNode.peerWorker()),
            asyncSendActionNode.peerWorker().name()));
    return workerSend;
}
@Override
public BLangNode transform(WaitActionNode waitActionNode) {
    Node futureExpr = waitActionNode.waitFutureExpr();
    // `wait {a: f1, b: f2}` uses the dedicated wait-for-all form.
    if (futureExpr.kind() == SyntaxKind.WAIT_FIELDS_LIST) {
        return getWaitForAllExpr((WaitFieldsListNode) futureExpr);
    }

    // Single-future wait.
    BLangWaitExpr waitExpr = TreeBuilder.createWaitExpressionNode();
    waitExpr.exprList = Collections.singletonList(createExpression(futureExpr));
    waitExpr.pos = getPosition(waitActionNode);
    return waitExpr;
}
// Builds the `wait {k1: f1, k2: f2, ...}` expression, one key-value per field in source order.
private BLangWaitForAllExpr getWaitForAllExpr(WaitFieldsListNode waitFields) {
    BLangWaitForAllExpr waitForAllExpr = TreeBuilder.createWaitForAllExpressionNode();
    List<BLangWaitKeyValue> keyValuePairs = new ArrayList<>();
    for (Node field : waitFields.waitFields()) {
        keyValuePairs.add(getWaitForAllExpr(field));
    }
    waitForAllExpr.keyValuePairs = keyValuePairs;
    waitForAllExpr.pos = getPosition(waitFields);
    return waitForAllExpr;
}
// Builds one key-value pair of a wait-for-all expression from either an explicit
// `key: futureExpr` field or the shorthand `x` form.
private BLangWaitKeyValue getWaitForAllExpr(Node waitFields) {
    BLangWaitForAllExpr.BLangWaitKeyValue keyValue = TreeBuilder.createWaitKeyValueNode();
    keyValue.pos = getPosition(waitFields);

    if (waitFields.kind() == SyntaxKind.WAIT_FIELD) {
        // Explicit `key: futureExpr` field.
        WaitFieldNode field = (WaitFieldNode) waitFields;
        BLangIdentifier fieldKey = createIdentifier(field.fieldName().name());
        fieldKey.setLiteral(false);
        keyValue.key = fieldKey;
        keyValue.valueExpr = createExpression(field.waitFutureExpr());
        return keyValue;
    }

    // Shorthand `x`: the variable name doubles as both the key and the future reference.
    SimpleNameReferenceNode shorthand = (SimpleNameReferenceNode) waitFields;
    BLangIdentifier shorthandKey = createIdentifier(shorthand.name());
    shorthandKey.setLiteral(false);
    keyValue.key = shorthandKey;

    BLangSimpleVarRef futureRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    futureRef.pos = getPosition(shorthand);
    futureRef.variableName = shorthandKey;
    futureRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    keyValue.keyExpr = futureRef;
    return keyValue;
}
@Override
public BLangNode transform(StartActionNode startActionNode) {
    // `start f(...)` — mark the underlying invocation as asynchronous.
    BLangNode expression = createActionOrExpression(startActionNode.expression());

    BLangInvocation invocation;
    if (!(expression instanceof BLangWorkerSend)) {
        invocation = (BLangInvocation) expression;
    } else {
        // For a worker send, unwrap the sent expression and treat it as the invocation.
        invocation = (BLangInvocation) ((BLangWorkerSend) expression).expr;
        expression = ((BLangWorkerSend) expression).expr;
    }

    if (expression.getKind() == NodeKind.INVOCATION) {
        // Re-wrap a plain invocation as an action invocation, copying its fields, so that
        // `start` semantics apply; the position becomes the whole start action.
        BLangActionInvocation actionInvocation = (BLangActionInvocation) TreeBuilder.createActionInvocation();
        actionInvocation.expr = invocation.expr;
        actionInvocation.pkgAlias = invocation.pkgAlias;
        actionInvocation.name = invocation.name;
        actionInvocation.argExprs = invocation.argExprs;
        actionInvocation.flagSet = invocation.flagSet;
        actionInvocation.pos = getPosition(startActionNode);
        invocation = actionInvocation;
    }

    invocation.async = true;
    invocation.annAttachments = applyAll(startActionNode.annotations());
    return invocation;
}
@Override
public BLangNode transform(TransactionStatementNode transactionStatementNode) {
    // `transaction { ... } [on fail ...]`
    BLangTransaction transaction = (BLangTransaction) TreeBuilder.createTransactionNode();
    transaction.pos = getPosition(transactionStatementNode);

    BLangBlockStmt body = (BLangBlockStmt) transactionStatementNode.blockStatement().apply(this);
    body.pos = getPosition(transactionStatementNode.blockStatement());
    transaction.setTransactionBody(body);

    transactionStatementNode.onFailClause().ifPresent(onFailClauseNode -> transaction.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClauseNode.apply(this)));
    return transaction;
}
@Override
public BLangNode transform(PositionalArgumentNode argumentNode) {
    // A positional argument is represented by its underlying expression alone.
    BLangExpression argExpr = createExpression(argumentNode.expression());
    return argExpr;
}
@Override
public BLangNode transform(NamedArgumentNode namedArgumentNode) {
    // `name = expr` argument: capture the name and the value expression.
    BLangNamedArgsExpression namedArgExpr = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArgExpr.name = this.createIdentifier(namedArgumentNode.argumentName().name());
    namedArgExpr.expr = createExpression(namedArgumentNode.expression());
    namedArgExpr.pos = getPosition(namedArgumentNode);
    return namedArgExpr;
}
@Override
public BLangNode transform(RestArgumentNode restArgumentNode) {
    // `...expr` rest argument; note the position is anchored at the ellipsis token.
    BLangRestArgsExpression restArgs = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    restArgs.expr = createExpression(restArgumentNode.expression());
    restArgs.pos = getPosition(restArgumentNode.ellipsis());
    return restArgs;
}
@Override
public BLangNode transform(RequiredParameterNode requiredParameter) {
    // Required parameter: a simple variable flagged REQUIRED_PARAM.
    BLangSimpleVariable param = createSimpleVar(requiredParameter.paramName(),
            requiredParameter.typeName(), requiredParameter.annotations());
    param.flagSet.add(Flag.REQUIRED_PARAM);
    param.pos = getPosition(requiredParameter);
    requiredParameter.paramName().ifPresent(name -> param.name.pos = getPosition(name));
    // Drop leading annotations etc. from the reported range: start at the type name.
    param.pos = trimLeft(param.pos, getPosition(requiredParameter.typeName()));
    return param;
}
@Override
public BLangNode transform(IncludedRecordParameterNode includedRecordParameterNode) {
    // `*T name` included-record parameter.
    BLangSimpleVariable param = createSimpleVar(includedRecordParameterNode.paramName(),
            includedRecordParameterNode.typeName(), includedRecordParameterNode.annotations());
    param.flagSet.add(INCLUDED);
    param.pos = getPosition(includedRecordParameterNode);
    includedRecordParameterNode.paramName().ifPresent(name -> param.name.pos = getPosition(name));
    // Drop leading annotations etc. from the reported range: start at the type name.
    param.pos = trimLeft(param.pos, getPosition(includedRecordParameterNode.typeName()));
    return param;
}
@Override
public BLangNode transform(DefaultableParameterNode defaultableParameter) {
    // Parameter carrying a default-value expression.
    BLangSimpleVariable param = createSimpleVar(defaultableParameter.paramName(),
            defaultableParameter.typeName(),
            defaultableParameter.annotations());
    param.flagSet.add(Flag.DEFAULTABLE_PARAM);
    param.setInitialExpression(createExpression(defaultableParameter.expression()));
    param.pos = getPosition(defaultableParameter);
    return param;
}
@Override
public BLangNode transform(RestParameterNode restParameter) {
    // `T... name`: the declared element type is wrapped in a one-dimensional array type.
    BLangSimpleVariable restVar = createSimpleVar(restParameter.paramName(), restParameter.typeName(),
            restParameter.annotations());

    BLangArrayType arrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    arrayType.elemtype = restVar.typeNode;
    arrayType.dimensions = 1;
    arrayType.pos = getPosition(restParameter.typeName());
    restVar.typeNode = arrayType;

    restVar.flagSet.add(Flag.REST_PARAM);
    restVar.pos = getPosition(restParameter);
    return restVar;
}
@Override
public BLangNode transform(OptionalTypeDescriptorNode optTypeDescriptor) {
    // `T?` desugars to the union `T | ()`, marked nullable.
    BLangValueType nilType = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilType.typeKind = TypeKind.NIL;
    nilType.pos = getPosition(optTypeDescriptor.questionMarkToken());

    BLangUnionTypeNode unionType = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    unionType.memberTypeNodes.add(createTypeNode(optTypeDescriptor.typeDescriptor()));
    unionType.memberTypeNodes.add(nilType);
    unionType.nullable = true;
    unionType.pos = getPosition(optTypeDescriptor);
    return unionType;
}
@Override
public BLangNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) {
    // Builds a function type node from `function (...) returns T`, or from the
    // signature-less `function` form (the "any function" type).
    BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) TreeBuilder.createFunctionTypeNode();
    functionTypeNode.pos = getPosition(functionTypeDescriptorNode);
    functionTypeNode.returnsKeywordExists = true;

    if (functionTypeDescriptorNode.functionSignature().isPresent()) {
        FunctionSignatureNode funcSignature = functionTypeDescriptorNode.functionSignature().get();

        // Parameters: the rest parameter is stored separately from ordinary parameters.
        for (ParameterNode child : funcSignature.parameters()) {
            SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
            if (child.kind() == SyntaxKind.REST_PARAM) {
                functionTypeNode.restParam = (BLangSimpleVariable) param;
            } else {
                functionTypeNode.params.add((BLangVariable) param);
            }
        }

        // A missing `returns` clause implies a nil return type.
        Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
        if (retNode.isPresent()) {
            ReturnTypeDescriptorNode returnType = retNode.get();
            functionTypeNode.returnTypeNode = createTypeNode(returnType.type());
        } else {
            BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            bLValueType.pos = getPosition(funcSignature);
            bLValueType.typeKind = TypeKind.NIL;
            functionTypeNode.returnTypeNode = bLValueType;
        }
    } else {
        // `function` with no signature.
        functionTypeNode.flagSet.add(Flag.ANY_FUNCTION);
    }

    functionTypeNode.flagSet.add(Flag.PUBLIC);

    // Only `isolated` and `transactional` qualifiers are meaningful here.
    for (Token token : functionTypeDescriptorNode.qualifierList()) {
        if (token.kind() == SyntaxKind.ISOLATED_KEYWORD) {
            functionTypeNode.flagSet.add(Flag.ISOLATED);
        } else if (token.kind() == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            functionTypeNode.flagSet.add(Flag.TRANSACTIONAL);
        }
    }
    return functionTypeNode;
}
@Override
public BLangNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) {
    // e.g. `map<T>`, `future<T>`: resolve the built-in base type, then constrain it with T.
    BLangBuiltInRefTypeNode baseType =
            (BLangBuiltInRefTypeNode) createBuiltInTypeNode(parameterizedTypeDescNode.parameterizedType());

    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = baseType.typeKind;
    refType.pos = baseType.pos;

    BLangConstrainedType constrained = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
    constrained.type = refType;
    constrained.constraint = createTypeNode(parameterizedTypeDescNode.typeParameter().typeNode());
    constrained.pos = getPosition(parameterizedTypeDescNode);
    return constrained;
}
@Override
public BLangNode transform(KeySpecifierNode keySpecifierNode) {
    // `key(a, b, ...)` — record each key field name on the specifier node.
    BLangTableKeySpecifier keySpecifier =
            (BLangTableKeySpecifier) TreeBuilder.createTableKeySpecifierNode();
    keySpecifier.pos = getPosition(keySpecifierNode);
    keySpecifierNode.fieldNames().forEach(field -> keySpecifier.addFieldNameIdentifier(createIdentifier(field)));
    return keySpecifier;
}
@Override
public BLangNode transform(KeyTypeConstraintNode keyTypeConstraintNode) {
    // `key<T>` table key-type constraint.
    BLangTableKeyTypeConstraint keyConstraint = new BLangTableKeyTypeConstraint();
    keyConstraint.keyType = createTypeNode(keyTypeConstraintNode.typeParameterNode());
    keyConstraint.pos = getPosition(keyTypeConstraintNode);
    return keyConstraint;
}
@Override
public BLangNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) {
    // `table<RowType>` with an optional `key(...)` specifier or `key<T>` constraint.
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TreeUtils.stringToTypeKind(tableTypeDescriptorNode.tableKeywordToken().text());
    refType.pos = getPosition(tableTypeDescriptorNode);

    BLangTableTypeNode tableTypeNode = (BLangTableTypeNode) TreeBuilder.createTableTypeNode();
    tableTypeNode.pos = getPosition(tableTypeDescriptorNode);
    tableTypeNode.type = refType;
    tableTypeNode.constraint = createTypeNode(tableTypeDescriptorNode.rowTypeParameterNode());
    // The key constraint is either a field-name specifier or a key type; only one is set.
    if (tableTypeDescriptorNode.keyConstraintNode().isPresent()) {
        Node constraintNode = tableTypeDescriptorNode.keyConstraintNode().get();
        if (constraintNode.kind() == SyntaxKind.KEY_TYPE_CONSTRAINT) {
            tableTypeNode.tableKeyTypeConstraint = (BLangTableKeyTypeConstraint) constraintNode.apply(this);
        } else if (constraintNode.kind() == SyntaxKind.KEY_SPECIFIER) {
            tableTypeNode.tableKeySpecifier = (BLangTableKeySpecifier) constraintNode.apply(this);
        }
    }
    // Inline (anonymous) table types are flagged for later anonymous-type handling.
    tableTypeNode.isTypeInlineDefined = checkIfAnonymous(tableTypeDescriptorNode);
    return tableTypeNode;
}
@Override
public BLangNode transform(SimpleNameReferenceNode simpleNameRefNode) {
    // Unqualified type reference: the package alias is a fresh, empty identifier.
    BLangUserDefinedType userDefinedType = new BLangUserDefinedType();
    userDefinedType.typeName = createIdentifier(simpleNameRefNode.name());
    userDefinedType.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    userDefinedType.pos = getPosition(simpleNameRefNode);
    return userDefinedType;
}
@Override
public BLangNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
    // `prefix:identifier` becomes a simple var-ref whose package alias is the module prefix.
    BLangSimpleVarRef nameRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    nameRef.pkgAlias = createIdentifier(qualifiedNameReferenceNode.modulePrefix());
    nameRef.variableName = createIdentifier(qualifiedNameReferenceNode.identifier());
    nameRef.pos = getPosition(qualifiedNameReferenceNode);
    return nameRef;
}
@Override
public BLangNode transform(XMLProcessingInstruction xmlProcessingInstruction) {
    // `<?target data?>` — the data may contain interpolations.
    BLangXMLProcInsLiteral xmlProcInsLiteral =
            (BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode();
    if (xmlProcessingInstruction.data().isEmpty()) {
        // An empty PI still carries one empty literal data fragment.
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = getPosition(xmlProcessingInstruction);
        xmlProcInsLiteral.dataFragments.add(emptyLiteral);
    } else {
        for (Node dataNode : xmlProcessingInstruction.data()) {
            xmlProcInsLiteral.dataFragments.add(createExpression(dataNode));
        }
    }

    XMLNameNode target = xmlProcessingInstruction.target();
    if (target.kind() == SyntaxKind.XML_SIMPLE_NAME) {
        xmlProcInsLiteral.target = createSimpleLiteral(((XMLSimpleNameNode) target).name());
    } else {
        // NOTE(review): for a qualified target only the prefix becomes the target literal,
        // dropping the local name — confirm this is intentional.
        xmlProcInsLiteral.target = createSimpleLiteral(((XMLQualifiedNameNode) target).prefix());
    }
    xmlProcInsLiteral.pos = getPosition(xmlProcessingInstruction);
    return xmlProcInsLiteral;
}
@Override
public BLangNode transform(XMLComment xmlComment) {
    // `<!-- ... -->` — an empty comment still needs a single empty text fragment.
    BLangXMLCommentLiteral commentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode();
    Location commentPos = getPosition(xmlComment);
    commentLiteral.pos = commentPos;

    if (xmlComment.content().isEmpty()) {
        BLangLiteral empty = createEmptyLiteral();
        empty.pos = commentPos;
        commentLiteral.textFragments.add(empty);
        return commentLiteral;
    }

    for (Node contentNode : xmlComment.content()) {
        commentLiteral.textFragments.add(createExpression(contentNode));
    }
    return commentLiteral;
}
@Override
public BLangNode transform(XMLElementNode xmlElementNode) {
    // `<tag ...> children </tag>` — tag names, children, then start-tag attributes.
    BLangXMLElementLiteral element = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    element.startTagName = createExpression(xmlElementNode.startTag());
    element.endTagName = createExpression(xmlElementNode.endTag());

    // Text children become simple literals; everything else goes through createExpression.
    for (Node child : xmlElementNode.content()) {
        if (child.kind() == SyntaxKind.XML_TEXT) {
            element.children.add(createSimpleLiteral(((XMLTextNode) child).content()));
        } else {
            element.children.add(createExpression(child));
        }
    }

    for (XMLAttributeNode attribute : xmlElementNode.startTag().attributes()) {
        element.attributes.add((BLangXMLAttribute) attribute.apply(this));
    }
    element.isRoot = true;
    element.pos = getPosition(xmlElementNode);
    return element;
}
@Override
public BLangNode transform(XMLAttributeNode xmlAttributeNode) {
    // `name="value"` within a start tag.
    BLangXMLAttribute attribute = (BLangXMLAttribute) TreeBuilder.createXMLAttributeNode();
    attribute.name = createExpression(xmlAttributeNode.attributeName());
    attribute.value = (BLangXMLQuotedString) xmlAttributeNode.value().apply(this);
    attribute.pos = getPosition(xmlAttributeNode);
    return attribute;
}
@Override
public BLangNode transform(ByteArrayLiteralNode byteArrayLiteralNode) {
    // base16/base64 byte-array literal.
    BLangLiteral byteArrayLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    byteArrayLiteral.value = getValueFromByteArrayNode(byteArrayLiteralNode);
    byteArrayLiteral.originalValue = String.valueOf(byteArrayLiteral.value);
    byteArrayLiteral.type = symTable.getTypeFromTag(TypeTags.BYTE_ARRAY);
    byteArrayLiteral.type.tag = TypeTags.BYTE_ARRAY;
    byteArrayLiteral.pos = getPosition(byteArrayLiteralNode);
    return byteArrayLiteral;
}
@Override
public BLangNode transform(XMLAttributeValue xmlAttributeValue) {
    BLangXMLQuotedString quotedString = (BLangXMLQuotedString) TreeBuilder.createXMLQuotedStringNode();
    quotedString.pos = getPosition(xmlAttributeValue);
    // Record whether the value used single or double quotes.
    if (xmlAttributeValue.startQuote().kind() == SyntaxKind.SINGLE_QUOTE_TOKEN) {
        quotedString.quoteType = QuoteType.SINGLE_QUOTE;
    } else {
        quotedString.quoteType = QuoteType.DOUBLE_QUOTE;
    }

    if (xmlAttributeValue.value().isEmpty()) {
        // Empty value: a single empty literal fragment.
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = getPosition(xmlAttributeValue);
        quotedString.textFragments.add(emptyLiteral);
    } else if (xmlAttributeValue.value().size() == 1 &&
            xmlAttributeValue.value().get(0).kind() == SyntaxKind.INTERPOLATION) {
        // A lone interpolation gets a trailing empty literal appended — presumably so the
        // fragment list never consists of a single non-literal node; TODO confirm motivation.
        quotedString.textFragments.add(createExpression(xmlAttributeValue.value().get(0)));
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = getPosition(xmlAttributeValue);
        quotedString.textFragments.add(emptyLiteral);
    } else {
        for (Node value : xmlAttributeValue.value()) {
            quotedString.textFragments.add(createExpression(value));
        }
    }
    return quotedString;
}
@Override
public BLangNode transform(XMLStartTagNode startTagNode) {
    // A start tag is represented by its (possibly qualified) name node.
    XMLNameNode tagName = startTagNode.name();
    return tagName.apply(this);
}
@Override
public BLangNode transform(XMLEndTagNode endTagNode) {
    // An end tag is represented by its (possibly qualified) name node.
    XMLNameNode tagName = endTagNode.name();
    return tagName.apply(this);
}
@Override
public BLangNode transform(XMLTextNode xmlTextNode) {
    // Character data inside an element; createExpression handles interpolations too.
    Node textContent = xmlTextNode.content();
    return createExpression(textContent);
}
// Builds a plain XML text literal for an `xml` template whose content is not an element.
private BLangNode createXMLLiteral(TemplateExpressionNode expressionNode) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();

    if (expressionNode.content().isEmpty()) {
        // An empty template still carries one empty string fragment.
        textLiteral.pos = getPosition(expressionNode);
        textLiteral.textFragments.add(createEmptyStringLiteral(textLiteral.pos));
        return textLiteral;
    }

    // The position is anchored at the first content fragment, not the whole template.
    textLiteral.pos = getPosition(expressionNode.content().get(0));
    for (Node contentNode : expressionNode.content()) {
        textLiteral.textFragments.add(createExpression(contentNode));
    }
    return textLiteral;
}
@Override
public BLangNode transform(XMLNamespaceDeclarationNode xmlnsDeclNode) {
    // Local `xmlns "uri" as prefix;` — the declaration is wrapped in an XMLNS statement node.
    BLangXMLNS xmlnsNode = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    xmlnsNode.prefix = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null));
    xmlnsNode.namespaceURI = createExpression(xmlnsDeclNode.namespaceuri());
    xmlnsNode.pos = getPosition(xmlnsDeclNode);

    BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    xmlnsStmt.xmlnsDecl = xmlnsNode;
    xmlnsStmt.pos = getPosition(xmlnsDeclNode);
    return xmlnsStmt;
}
@Override
public BLangNode transform(ModuleXMLNamespaceDeclarationNode xmlnsDeclNode) {
    // Module-level `xmlns` declaration; no wrapping statement node is needed.
    BLangXMLNS xmlnsNode = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    xmlnsNode.prefix = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null));
    xmlnsNode.namespaceURI = createExpression(xmlnsDeclNode.namespaceuri());
    xmlnsNode.pos = getPosition(xmlnsDeclNode);
    return xmlnsNode;
}
@Override
public BLangNode transform(XMLQualifiedNameNode xmlQualifiedNameNode) {
    // `prefix:local` XML name.
    BLangXMLQName qName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    qName.prefix = createIdentifier(getPosition(xmlQualifiedNameNode.prefix()),
            xmlQualifiedNameNode.prefix().name());
    qName.localname = createIdentifier(getPosition(xmlQualifiedNameNode.name()),
            xmlQualifiedNameNode.name().name());
    qName.pos = getPosition(xmlQualifiedNameNode);
    return qName;
}
@Override
public BLangNode transform(XMLSimpleNameNode xmlSimpleNameNode) {
    // Unprefixed XML name; the prefix is an explicit empty identifier.
    BLangXMLQName qName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    qName.prefix = createIdentifier(null, "");
    qName.localname = createIdentifier(xmlSimpleNameNode.name());
    qName.pos = getPosition(xmlSimpleNameNode);
    return qName;
}
@Override
public BLangNode transform(XMLEmptyElementNode xMLEmptyElementNode) {
    // Self-closing element `<name ... />`: only a tag name and attributes, no children.
    BLangXMLElementLiteral emptyElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    emptyElement.startTagName = createExpression(xMLEmptyElementNode.name());
    for (XMLAttributeNode attribute : xMLEmptyElementNode.attributes()) {
        emptyElement.attributes.add((BLangXMLAttribute) attribute.apply(this));
    }
    emptyElement.pos = getPosition(xMLEmptyElementNode);
    return emptyElement;
}
@Override
public BLangNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) {
    // `expr->method(args)` remote call.
    BLangInvocation.BLangActionInvocation actionInvocation =
            (BLangInvocation.BLangActionInvocation) TreeBuilder.createActionInvocation();
    BLangNameReference methodRef = createBLangNameReference(remoteMethodCallActionNode.methodName().name());
    actionInvocation.name = (BLangIdentifier) methodRef.name;
    actionInvocation.pkgAlias = (BLangIdentifier) methodRef.pkgAlias;
    actionInvocation.expr = createExpression(remoteMethodCallActionNode.expression());
    actionInvocation.argExprs = applyAll(remoteMethodCallActionNode.arguments());
    actionInvocation.pos = getPosition(remoteMethodCallActionNode);
    return actionInvocation;
}
@Override
public BLangNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) {
    // `stream<C>` / `stream<C, E>`; a bare `stream` is constrained to `any`.
    Location pos = getPosition(streamTypeDescriptorNode);
    BLangType constraint;
    BLangType error = null;

    Optional<Node> paramsNode = streamTypeDescriptorNode.streamTypeParamsNode();
    if (paramsNode.isPresent()) {
        StreamTypeParamsNode params = (StreamTypeParamsNode) paramsNode.get();
        // Build the error type first, then the constraint, preserving the original
        // creation order of the type nodes.
        if (params.rightTypeDescNode().isPresent()) {
            error = createTypeNode(params.rightTypeDescNode().get());
        }
        constraint = createTypeNode(params.leftTypeDescNode());
    } else {
        constraint = addValueType(pos, TypeKind.ANY);
    }

    BLangBuiltInRefTypeNode streamRefType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    streamRefType.typeKind = TypeKind.STREAM;
    streamRefType.pos = pos;

    BLangStreamType streamType = (BLangStreamType) TreeBuilder.createStreamTypeNode();
    streamType.type = streamRefType;
    streamType.constraint = constraint;
    streamType.error = error;
    streamType.pos = pos;
    return streamType;
}
@Override
public BLangNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) {
    // Flattens nested array descriptors (e.g. `T[2][3]`) into a single BLangArrayType,
    // collecting one size entry per dimension, outermost first.
    int dimensions = 1;
    List<BLangExpression> sizes = new ArrayList<>();
    Location position = getPosition(arrayTypeDescriptorNode);
    while (true) {
        if (!arrayTypeDescriptorNode.arrayLength().isPresent()) {
            // `T[]` — open (unsized) dimension.
            sizes.add(new BLangLiteral(Integer.valueOf(OPEN_ARRAY_INDICATOR), symTable.intType));
        } else {
            Node keyExpr = arrayTypeDescriptorNode.arrayLength().get();
            if (keyExpr.kind() == SyntaxKind.NUMERIC_LITERAL) {
                BasicLiteralNode numericLiteralNode = (BasicLiteralNode) keyExpr;
                if (numericLiteralNode.literalToken().kind() == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) {
                    sizes.add(new BLangLiteral(Integer.parseInt(keyExpr.toString()), symTable.intType));
                } else {
                    // NOTE(review): parses the literal text as base-16. This assumes the text is
                    // bare hex digits with no `0x` prefix or surrounding trivia, otherwise
                    // Integer.parseInt throws — TODO confirm the token text shape.
                    sizes.add(new BLangLiteral(Integer.parseInt(keyExpr.toString(), 16), symTable.intType));
                }
            } else if (keyExpr.kind() == SyntaxKind.ASTERISK_LITERAL) {
                // `T[*]` — length inferred from context.
                sizes.add(new BLangLiteral(Integer.valueOf(INFERRED_ARRAY_INDICATOR), symTable.intType));
            } else {
                // Non-numeric length, e.g. a constant reference `T[N]`.
                sizes.add(createExpression(keyExpr));
            }
        }

        if (arrayTypeDescriptorNode.memberTypeDesc().kind() != SyntaxKind.ARRAY_TYPE_DESC) {
            break;
        }
        // Descend into the nested array descriptor for the next dimension.
        arrayTypeDescriptorNode = (ArrayTypeDescriptorNode) arrayTypeDescriptorNode.memberTypeDesc();
        dimensions++;
    }

    BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    arrayTypeNode.pos = position;
    arrayTypeNode.elemtype = createTypeNode(arrayTypeDescriptorNode.memberTypeDesc());
    arrayTypeNode.dimensions = dimensions;
    arrayTypeNode.sizes = sizes.toArray(new BLangExpression[0]);
    return arrayTypeNode;
}
/**
 * Transforms an enum declaration: each member is hoisted to a module-level constant, and the
 * enum itself becomes a type definition whose type is the union of the member names.
 */
public BLangNode transform(EnumDeclarationNode enumDeclarationNode) {
    // Use a primitive boolean — the boxed Boolean previously used here gains nothing and
    // invites accidental unboxing of null.
    boolean publicQualifier = enumDeclarationNode.qualifier().isPresent()
            && enumDeclarationNode.qualifier().get().kind() == SyntaxKind.PUBLIC_KEYWORD;

    // Hoist each enum member as a constant definition at the top level.
    for (Node member : enumDeclarationNode.enumMemberList()) {
        addToTop(transformEnumMember((EnumMemberNode) member, publicQualifier));
    }

    BLangTypeDefinition bLangTypeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    if (publicQualifier) {
        bLangTypeDefinition.flagSet.add(Flag.PUBLIC);
    }
    bLangTypeDefinition.flagSet.add(Flag.ENUM);

    bLangTypeDefinition.setName((BLangIdentifier) transform(enumDeclarationNode.identifier()));
    bLangTypeDefinition.pos = getPosition(enumDeclarationNode);

    // The enum's type is the union of its member identifiers (reversed, preserving the
    // ordering the rest of the compiler expects).
    BLangUnionTypeNode bLangUnionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    bLangUnionTypeNode.pos = bLangTypeDefinition.pos;
    for (Node member : enumDeclarationNode.enumMemberList()) {
        bLangUnionTypeNode.memberTypeNodes.add(createTypeNode(((EnumMemberNode) member).identifier()));
    }
    Collections.reverse(bLangUnionTypeNode.memberTypeNodes);
    bLangTypeDefinition.setTypeNode(bLangUnionTypeNode);

    bLangTypeDefinition.annAttachments = applyAll(getAnnotations(enumDeclarationNode.metadata()));
    bLangTypeDefinition.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(enumDeclarationNode.metadata()));
    return bLangTypeDefinition;
}
/**
 * Builds the constant definition for a single enum member. The constant's type node is
 * `string`, and when the member's value is a literal with an original value, an associated
 * finite type definition is created from a second copy of the value expression.
 *
 * @param member          the enum member syntax node
 * @param publicQualifier whether the enclosing enum is public
 * @return the constant node for the member
 */
public BLangConstant transformEnumMember(EnumMemberNode member, Boolean publicQualifier) {
    BLangConstant bLangConstant = (BLangConstant) TreeBuilder.createConstantNode();
    bLangConstant.pos = getPosition(member);
    bLangConstant.flagSet.add(Flag.CONSTANT);
    if (publicQualifier) {
        bLangConstant.flagSet.add(Flag.PUBLIC);
    }

    bLangConstant.annAttachments = applyAll(getAnnotations(member.metadata()));
    bLangConstant.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(member.metadata()));

    bLangConstant.setName((BLangIdentifier) transform(member.identifier()));

    // A second, independent copy of the value expression is kept for the finite type below.
    BLangExpression deepLiteral;
    if (member.constExprNode().isPresent()) {
        BLangExpression expression = createExpression(member.constExprNode().orElse(null));
        bLangConstant.setInitialExpression(expression);
        deepLiteral = createExpression(member.constExprNode().orElse(null));
    } else {
        BLangLiteral literal = createSimpleLiteral(member.identifier());
        bLangConstant.setInitialExpression(literal);
        deepLiteral = createSimpleLiteral(member.identifier());
    }

    BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    typeNode.typeKind = TypeKind.STRING;
    bLangConstant.setTypeNode(typeNode);

    if (deepLiteral instanceof BLangLiteral) {
        BLangLiteral literal = (BLangLiteral) deepLiteral;
        // Bug fix: compare string contents with equals() — the previous `!= ""` compared
        // object identity, so a non-interned empty string would wrongly count as non-empty.
        // `!"".equals(...)` also preserves the original behavior for a null originalValue.
        if (!"".equals(literal.originalValue)) {
            BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
            literal.originalValue = null;
            typeNodeAssociated.addValue(deepLiteral);
            bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
        } else {
            bLangConstant.associatedTypeDefinition = null;
        }
    } else {
        BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        typeNodeAssociated.addValue(deepLiteral);
        bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
    }
    return bLangConstant;
}
@Override
public BLangNode transform(QueryExpressionNode queryExprNode) {
    // Assembles the clause list in source order: from, intermediate clauses, select,
    // then an optional on-conflict clause.
    BLangQueryExpr queryExpr = (BLangQueryExpr) TreeBuilder.createQueryExpressionNode();
    queryExpr.pos = getPosition(queryExprNode);

    BLangFromClause fromClause = (BLangFromClause) queryExprNode.queryPipeline().fromClause().apply(this);
    queryExpr.queryClauseList.add(fromClause);

    for (Node clauseNode : queryExprNode.queryPipeline().intermediateClauses()) {
        queryExpr.queryClauseList.add(clauseNode.apply(this));
    }

    BLangSelectClause selectClause = (BLangSelectClause) queryExprNode.selectClause().apply(this);
    queryExpr.queryClauseList.add(selectClause);

    Optional<OnConflictClauseNode> onConflict = queryExprNode.onConflictClause();
    onConflict.ifPresent(onConflictClauseNode -> queryExpr.queryClauseList.add(onConflictClauseNode.apply(this)));

    // `table`/`stream` construct types; `table ... key(...)` also records key field names.
    boolean isTable = false;
    boolean isStream = false;

    Optional<QueryConstructTypeNode> optionalQueryConstructTypeNode = queryExprNode.queryConstructType();
    if (optionalQueryConstructTypeNode.isPresent()) {
        QueryConstructTypeNode queryConstructTypeNode = optionalQueryConstructTypeNode.get();
        isTable = queryConstructTypeNode.keyword().kind() == SyntaxKind.TABLE_KEYWORD;
        isStream = queryConstructTypeNode.keyword().kind() == SyntaxKind.STREAM_KEYWORD;
        if (queryConstructTypeNode.keySpecifier().isPresent()) {
            for (IdentifierToken fieldNameNode : queryConstructTypeNode.keySpecifier().get().fieldNames()) {
                queryExpr.fieldNameIdentifierList.add(createIdentifier(getPosition(fieldNameNode), fieldNameNode));
            }
        }
    }
    queryExpr.isStream = isStream;
    queryExpr.isTable = isTable;
    return queryExpr;
}
public BLangNode transform(OnFailClauseNode onFailClauseNode) {
    // Translates an on-fail clause into a BLangOnFailClause carrying an
    // implicit FINAL variable definition for the caught error value.
    Location clausePos = getPosition(onFailClauseNode);
    boolean declaredWithVar = onFailClauseNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;

    BLangSimpleVariable errorVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    errorVar.isDeclaredWithVar = declaredWithVar;
    if (!declaredWithVar) {
        errorVar.setTypeNode(createTypeNode(onFailClauseNode.typeDescriptor()));
    }
    errorVar.pos = clausePos;
    errorVar.setName(this.createIdentifier(onFailClauseNode.failErrorName()));
    errorVar.name.pos = getPosition(onFailClauseNode.failErrorName());

    BLangSimpleVariableDef errorVarDef =
            (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    errorVarDef.setVariable(errorVar);
    errorVarDef.pos = errorVar.name.pos;
    markVariableWithFlag(errorVarDef.getVariable(), Flag.FINAL);

    BLangOnFailClause onFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    onFailClause.pos = clausePos;
    onFailClause.isDeclaredWithVar = declaredWithVar;
    onFailClause.variableDefinitionNode = errorVarDef;

    BLangBlockStmt body = (BLangBlockStmt) transform(onFailClauseNode.blockStatement());
    body.pos = clausePos;
    onFailClause.body = body;
    return onFailClause;
}
@Override
public BLangNode transform(LetClauseNode letClauseNode) {
    // Each let-variable declaration becomes a FINAL BLangLetVariable.
    BLangLetClause letClause = (BLangLetClause) TreeBuilder.createLetClauseNode();
    letClause.pos = getPosition(letClauseNode);
    List<BLangLetVariable> letVariables = new ArrayList<>();
    for (LetVariableDeclarationNode declaration : letClauseNode.letVarDeclarations()) {
        BLangLetVariable letVariable = createLetVariable(declaration);
        letVariable.definitionNode.getVariable().addFlag(Flag.FINAL);
        letVariables.add(letVariable);
    }
    // Keep the default declarations list when nothing was declared.
    if (!letVariables.isEmpty()) {
        letClause.letVarDeclarations = letVariables;
    }
    return letClause;
}
@Override
public BLangNode transform(FromClauseNode fromClauseNode) {
    // from <binding-pattern> in <collection>
    BLangFromClause fromClause = (BLangFromClause) TreeBuilder.createFromClauseNode();
    fromClause.pos = getPosition(fromClauseNode);
    fromClause.collection = createExpression(fromClauseNode.expression());
    TypedBindingPatternNode typedBindingPattern = fromClauseNode.typedBindingPattern();
    fromClause.variableDefinitionNode = createBLangVarDef(getPosition(typedBindingPattern),
            typedBindingPattern, Optional.empty(), Optional.empty());
    fromClause.isDeclaredWithVar = typedBindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    return fromClause;
}
@Override
public BLangNode transform(WhereClauseNode whereClauseNode) {
    // A where clause is a positioned filter expression.
    BLangWhereClause bLWhereClause = (BLangWhereClause) TreeBuilder.createWhereClauseNode();
    bLWhereClause.pos = getPosition(whereClauseNode);
    bLWhereClause.expression = createExpression(whereClauseNode.expression());
    return bLWhereClause;
}
@Override
public BLangNode transform(SelectClauseNode selectClauseNode) {
    // select <expression>
    BLangSelectClause bLSelectClause = (BLangSelectClause) TreeBuilder.createSelectClauseNode();
    bLSelectClause.pos = getPosition(selectClauseNode);
    bLSelectClause.expression = createExpression(selectClauseNode.expression());
    return bLSelectClause;
}
@Override
public BLangNode transform(OnConflictClauseNode onConflictClauseNode) {
    // on conflict <expression>
    BLangOnConflictClause bLOnConflict = (BLangOnConflictClause) TreeBuilder.createOnConflictClauseNode();
    bLOnConflict.pos = getPosition(onConflictClauseNode);
    bLOnConflict.expression = createExpression(onConflictClauseNode.expression());
    return bLOnConflict;
}
@Override
public BLangNode transform(LimitClauseNode limitClauseNode) {
    // limit <expression> (local renamed from the original's misleading
    // `selectClause` to `limitClause`).
    BLangLimitClause limitClause = (BLangLimitClause) TreeBuilder.createLimitClauseNode();
    limitClause.pos = getPosition(limitClauseNode);
    limitClause.expression = createExpression(limitClauseNode.expression());
    return limitClause;
}
@Override
public BLangNode transform(OnClauseNode onClauseNode) {
    // Maps the two sides of a join on-condition.
    BLangOnClause bLOnClause = (BLangOnClause) TreeBuilder.createOnClauseNode();
    bLOnClause.pos = getPosition(onClauseNode);
    bLOnClause.lhsExpr = createExpression(onClauseNode.lhsExpression());
    bLOnClause.rhsExpr = createExpression(onClauseNode.rhsExpression());
    return bLOnClause;
}
@Override
public BLangNode transform(JoinClauseNode joinClauseNode) {
    // Builds the join clause together with its on-condition. The on-clause is
    // constructed inline here (not via transform(OnClauseNode)) so the
    // equals-keyword position can be recorded when present.
    BLangJoinClause joinClause = (BLangJoinClause) TreeBuilder.createJoinClauseNode();
    joinClause.pos = getPosition(joinClauseNode);
    TypedBindingPatternNode bindingPattern = joinClauseNode.typedBindingPattern();
    joinClause.variableDefinitionNode = createBLangVarDef(getPosition(joinClauseNode), bindingPattern,
            Optional.empty(), Optional.empty());
    joinClause.collection = createExpression(joinClauseNode.expression());
    joinClause.isDeclaredWithVar = bindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    joinClause.isOuterJoin = joinClauseNode.outerKeyword().isPresent();

    OnClauseNode onConditionNode = joinClauseNode.joinOnCondition();
    BLangOnClause onCondition = (BLangOnClause) TreeBuilder.createOnClauseNode();
    onCondition.pos = getPosition(onConditionNode);
    if (!onConditionNode.equalsKeyword().isMissing()) {
        onCondition.equalsKeywordPos = getPosition(onConditionNode.equalsKeyword());
    }
    onCondition.lhsExpr = createExpression(onConditionNode.lhsExpression());
    onCondition.rhsExpr = createExpression(onConditionNode.rhsExpression());
    joinClause.onClause = onCondition;
    return joinClause;
}
@Override
public BLangNode transform(OrderByClauseNode orderByClauseNode) {
    // Every order-key child becomes one BLangOrderKey entry.
    BLangOrderByClause bLOrderByClause = (BLangOrderByClause) TreeBuilder.createOrderByClauseNode();
    bLOrderByClause.pos = getPosition(orderByClauseNode);
    orderByClauseNode.orderKey()
            .forEach(orderKeyNode -> bLOrderByClause.addOrderKey(createOrderKey(orderKeyNode)));
    return bLOrderByClause;
}
public BLangOrderKey createOrderKey(OrderKeyNode orderKeyNode) {
    // Ascending is the default; only an explicit "descending" token flips it.
    BLangOrderKey orderKey = (BLangOrderKey) TreeBuilder.createOrderKeyNode();
    orderKey.pos = getPosition(orderKeyNode);
    orderKey.expression = createExpression(orderKeyNode.expression());
    boolean descending = orderKeyNode.orderDirection().isPresent()
            && orderKeyNode.orderDirection().get().text().equals("descending");
    orderKey.isAscending = !descending;
    return orderKey;
}
@Override
public BLangNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) {
    // Flattens nested intersections: if either side is already an intersection
    // node, the other side is folded into its constituent list (RHS is checked
    // first, preserving the original precedence).
    BLangType leftType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.leftTypeDesc());
    BLangType rightType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.rightTypeDesc());

    BLangIntersectionTypeNode intersection;
    if (rightType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
        intersection = (BLangIntersectionTypeNode) rightType;
        intersection.constituentTypeNodes.add(0, leftType);
    } else if (leftType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
        intersection = (BLangIntersectionTypeNode) leftType;
        intersection.constituentTypeNodes.add(rightType);
    } else {
        intersection = (BLangIntersectionTypeNode) TreeBuilder.createIntersectionTypeNode();
        intersection.constituentTypeNodes.add(leftType);
        intersection.constituentTypeNodes.add(rightType);
    }
    intersection.pos = getPosition(intersectionTypeDescriptorNode);
    return intersection;
}
@Override
protected BLangNode transformSyntaxNode(Node node) {
    // Fallback for syntax nodes that have no dedicated transform overload.
    String nodeType = node.getClass().getSimpleName();
    throw new RuntimeException("Node not supported: " + nodeType);
}
@Override
public BLangNode transform(ServiceDeclarationNode serviceDeclarationNode) {
    // Desugars a service declaration into (1) an anonymous service class added
    // at module top level, (2) an implicit `new` of that class bound to an
    // anonymous variable, and (3) a BLangService wiring them together with the
    // attach-point expressions and the absolute resource path.
    Location pos = getPositionWithoutMetadata(serviceDeclarationNode);
    BLangClassDefinition annonClassDef = transformObjectCtorExpressionBody(serviceDeclarationNode.members());
    annonClassDef.isServiceDecl = true;
    annonClassDef.pos = pos;
    annonClassDef.flagSet.add(SERVICE);

    // The resource path is either a single string literal (kept as the
    // service-name literal) or a sequence of identifier and "/" tokens.
    List<IdentifierNode> absResourcePathPath = new ArrayList<>();
    NodeList<Node> pathList = serviceDeclarationNode.absoluteResourcePath();
    BLangLiteral serviceNameLiteral = null;
    if (pathList.size() == 1 && pathList.get(0).kind() == SyntaxKind.STRING_LITERAL) {
        serviceNameLiteral = (BLangLiteral) createExpression(pathList.get(0));
    } else {
        for (var token : pathList) {
            String text = ((Token) token).text();
            // A lone "/" is kept as a path segment; separator slashes inside
            // longer paths are dropped.
            if (pathList.size() == 1 && text.equals("/")) {
                absResourcePathPath.add(createIdentifier((Token) token));
            } else if (!text.equals("/")) {
                absResourcePathPath.add(createIdentifier((Token) token));
            }
        }
    }

    // Name the generated class and attach metadata from the declaration.
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
    annonClassDef.setName(anonTypeGenName);
    annonClassDef.flagSet.add(Flag.PUBLIC);

    Optional<TypeDescriptorNode> typeReference = serviceDeclarationNode.typeDescriptor();
    typeReference.ifPresent(typeReferenceNode -> {
        BLangType typeNode = createTypeNode(typeReferenceNode);
        annonClassDef.typeRefs.add(typeNode);
    });

    annonClassDef.annAttachments = applyAll(getAnnotations(serviceDeclarationNode.metadata()));
    annonClassDef.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(serviceDeclarationNode.metadata()));
    addToTop(annonClassDef);

    // Build the implicit `new <genName>()` initializer for the service variable.
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, annonClassDef.name);
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = pos;
    initNode.userDefinedType = userDefinedType;
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier pkgAlias = createIdentifier(pos, "");
    BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    // invocationNode.argExprs is empty at this point, so this is a no-op copy
    // kept for symmetry with other init-node construction sites.
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;

    BLangSimpleVariable serviceVariable = createServiceVariable(pos, annonClassDef, initNode);

    // Attach-point expressions (e.g. listeners) this service is bound to.
    List<BLangExpression> exprs = new ArrayList<>();
    for (var exp : serviceDeclarationNode.expressions()) {
        exprs.add(createExpression(exp));
    }

    BLangService service = (BLangService) TreeBuilder.createServiceNode();
    service.serviceVariable = serviceVariable;
    service.attachedExprs = exprs;
    service.serviceClass = annonClassDef;
    service.absoluteResourcePath = absResourcePathPath;
    service.serviceNameLiteral = serviceNameLiteral;
    service.annAttachments = annonClassDef.annAttachments;
    service.pos = pos;
    service.name = createIdentifier(pos, anonymousModelHelper.getNextAnonymousServiceVarKey(packageID));
    return service;
}
private BLangSimpleVariable createServiceVariable(Location pos, BLangClassDefinition annonClassDef,
                                                  BLangTypeInit initNode) {
    // Synthesises an internal variable of the anonymous service-class type,
    // initialised with the given `new` expression.
    BLangSimpleVariable serviceVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    serviceVar.typeNode = createUserDefinedType(pos,
            (BLangIdentifier) TreeBuilder.createIdentifierNode(), annonClassDef.name);
    String serviceVarName = anonymousModelHelper.getNextAnonymousServiceVarKey(packageID);
    serviceVar.name = createIdentifier(pos, serviceVarName);
    serviceVar.expr = initNode;
    serviceVar.internal = true;
    return serviceVar;
}
@Override
public BLangNode transform(ClassDefinitionNode classDefinitionNode) {
    // Converts a class definition into a BLangClassDefinition: metadata
    // (annotations, markdown docs), visibility, class-type qualifier flags,
    // then members (functions, fields and type references).
    BLangClassDefinition blangClass = (BLangClassDefinition) TreeBuilder.createClassDefNode();
    blangClass.pos = getPositionWithoutMetadata(classDefinitionNode);
    blangClass.annAttachments = applyAll(getAnnotations(classDefinitionNode.metadata()));
    BLangIdentifier identifierNode = createIdentifier(classDefinitionNode.className());
    blangClass.setName(identifierNode);
    blangClass.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(classDefinitionNode.metadata()));
    classDefinitionNode.visibilityQualifier().ifPresent(visibilityQual -> {
        if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            blangClass.flagSet.add(Flag.PUBLIC);
        }
    });
    // Map each class-type qualifier token to its corresponding flag.
    for (Token qualifier : classDefinitionNode.classTypeQualifiers()) {
        SyntaxKind kind = qualifier.kind();
        switch (kind) {
            case DISTINCT_KEYWORD:
                blangClass.flagSet.add(Flag.DISTINCT);
                break;
            case CLIENT_KEYWORD:
                blangClass.flagSet.add(Flag.CLIENT);
                break;
            case READONLY_KEYWORD:
                blangClass.flagSet.add(Flag.READONLY);
                break;
            case SERVICE_KEYWORD:
                blangClass.flagSet.add(Flag.SERVICE);
                break;
            case ISOLATED_KEYWORD:
                blangClass.flagSet.add(Flag.ISOLATED);
                break;
            default:
                // Any other qualifier indicates a parser/transformer mismatch.
                throw new RuntimeException("Syntax kind is not supported: " + kind);
        }
    }
    NodeList<Node> members = classDefinitionNode.members();
    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        if (bLangNode.getKind() == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                // The first init function becomes the class initializer; any
                // further init function is kept as a regular attached function
                // (NOTE(review): presumably a later phase reports the duplicate
                // -- not visible from here).
                if (blangClass.initFunction == null) {
                    bLangFunction.objInitFunction = true;
                    blangClass.initFunction = bLangFunction;
                } else {
                    blangClass.addFunction(bLangFunction);
                }
            } else {
                blangClass.addFunction(bLangFunction);
            }
        } else if (bLangNode.getKind() == NodeKind.VARIABLE) {
            blangClass.addField((BLangSimpleVariable) bLangNode);
        } else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            blangClass.addTypeReference((BLangType) bLangNode);
        }
    }
    return blangClass;
}
@Override
public BLangNode transform(RetryStatementNode retryStatementNode) {
    // A retry over a transaction statement becomes a BLangRetryTransaction;
    // any other body becomes a plain BLangRetry with an optional on-fail.
    BLangRetrySpec retrySpec = createRetrySpec(retryStatementNode);
    Location pos = getPosition(retryStatementNode);
    StatementNode retryBody = retryStatementNode.retryBody();

    if (retryBody.kind() == SyntaxKind.TRANSACTION_STATEMENT) {
        BLangRetryTransaction retryTransaction =
                (BLangRetryTransaction) TreeBuilder.createRetryTransactionNode();
        retryTransaction.pos = pos;
        retryTransaction.setRetrySpec(retrySpec);
        retryTransaction.setTransaction((BLangTransaction) retryBody.apply(this));
        return retryTransaction;
    }

    BLangRetry retryNode = (BLangRetry) TreeBuilder.createRetryNode();
    retryNode.pos = pos;
    retryNode.setRetrySpec(retrySpec);
    retryNode.setRetryBody((BLangBlockStmt) retryBody.apply(this));
    retryStatementNode.onFailClause().ifPresent(onFailClauseNode -> retryNode.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClauseNode.apply(this)));
    return retryNode;
}
private BLangRetrySpec createRetrySpec(RetryStatementNode retryStatementNode) {
    // Collects the optional retry-manager type parameter and constructor
    // arguments; the position falls back to the whole retry statement when
    // neither is present.
    BLangRetrySpec retrySpec = (BLangRetrySpec) TreeBuilder.createRetrySpecNode();
    retryStatementNode.typeParameter().ifPresent(typeParam -> {
        retrySpec.retryManagerType = createTypeNode(typeParam.typeNode());
        retrySpec.pos = getPosition(typeParam);
    });
    retryStatementNode.arguments().ifPresent(argList -> {
        retrySpec.pos = getPosition(argList);
        for (Node argNode : argList.arguments()) {
            retrySpec.argExprs.add(createExpression(argNode));
        }
    });
    if (retrySpec.pos == null) {
        retrySpec.pos = getPosition(retryStatementNode);
    }
    return retrySpec;
}
@Override
public BLangNode transform(TransactionalExpressionNode transactionalExpressionNode) {
    // A `transactional` expression carries no data beyond its location.
    BLangTransactionalExpr transactionalExpr = TreeBuilder.createTransactionalExpressionNode();
    transactionalExpr.pos = getPosition(transactionalExpressionNode);
    return transactionalExpr;
}
@Override
public BLangNode transform(XMLFilterExpressionNode xmlFilterExpressionNode) {
    // Collects the name-pattern filters, then wraps the subject expression in
    // an element-access node.
    List<BLangXMLElementFilter> elementFilters = new ArrayList<>();
    for (Node namePattern : xmlFilterExpressionNode.xmlPatternChain().xmlNamePattern()) {
        elementFilters.add(createXMLElementFilter(namePattern));
    }
    BLangExpression subject = createExpression(xmlFilterExpressionNode.expression());
    return new BLangXMLElementAccess(getPosition(xmlFilterExpressionNode), null, subject, elementFilters);
}
@Override
public BLangNode transform(XMLStepExpressionNode xmlStepExpressionNode) {
    // Builds an XML navigation access. starCount encodes the navigation form
    // via XMLNavigationAccess.NavAccessType.fromInt:
    //   0 -> plain step start, 1 -> /* form, 2 -> //**/ form.
    List<BLangXMLElementFilter> filters = new ArrayList<>();
    int starCount = 0;
    if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.SLASH_ASTERISK_TOKEN) {
        starCount = 1;
    } else if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.XML_NAME_PATTERN_CHAIN) {
        XMLNamePatternChainingNode xmlNamePatternChainingNode =
                (XMLNamePatternChainingNode) xmlStepExpressionNode.xmlStepStart();
        for (Node node : xmlNamePatternChainingNode.xmlNamePattern()) {
            filters.add(createXMLElementFilter(node));
        }
        // The chain's start token determines the access depth; other start
        // tokens leave starCount at 0.
        switch (xmlNamePatternChainingNode.startToken().kind()) {
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
                starCount = 2;
                break;
            case SLASH_ASTERISK_TOKEN:
                starCount = 1;
                break;
        }
    }
    BLangExpression expr = createExpression(xmlStepExpressionNode.expression());
    BLangXMLNavigationAccess xmlNavigationAccess =
            new BLangXMLNavigationAccess(getPosition(xmlStepExpressionNode), null, expr, filters,
                    XMLNavigationAccess.NavAccessType.fromInt(starCount), null);
    return xmlNavigationAccess;
}
@Override
public BLangNode transform(MatchStatementNode matchStatementNode) {
    // Builds a BLangMatchStatement: the matched expression is shared with
    // every clause and pattern so later phases can reference it.
    BLangMatchStatement matchStatement = (BLangMatchStatement) TreeBuilder.createMatchStatementNode();
    // Fix: position was never set on the statement node, unlike every other
    // transform in this class.
    matchStatement.pos = getPosition(matchStatementNode);
    BLangExpression matchStmtExpr = createExpression(matchStatementNode.condition());
    matchStatement.setExpression(matchStmtExpr);
    for (MatchClauseNode matchClauseNode : matchStatementNode.matchClauses()) {
        BLangMatchClause bLangMatchClause = (BLangMatchClause) TreeBuilder.createMatchClause();
        bLangMatchClause.pos = getPosition(matchClauseNode);
        bLangMatchClause.expr = matchStmtExpr;
        boolean matchGuardAvailable = false;
        if (matchClauseNode.matchGuard().isPresent()) {
            matchGuardAvailable = true;
            BLangMatchGuard bLangMatchGuard = (BLangMatchGuard) TreeBuilder.createMatchGuard();
            bLangMatchGuard.expr = createExpression(matchClauseNode.matchGuard().get().expression());
            bLangMatchClause.setMatchGuard(bLangMatchGuard);
        }
        for (Node matchPattern : matchClauseNode.matchPatterns()) {
            // transformMatchPattern returns null for unsupported patterns
            // (a diagnostic has already been logged); skip those.
            BLangMatchPattern bLangMatchPattern = transformMatchPattern(matchPattern);
            if (bLangMatchPattern != null) {
                bLangMatchPattern.matchExpr = matchStmtExpr;
                bLangMatchPattern.matchGuardIsAvailable = matchGuardAvailable;
                bLangMatchClause.addMatchPattern(bLangMatchPattern);
            }
        }
        bLangMatchClause.setBlockStatement((BLangBlockStmt) transform(matchClauseNode.blockStatement()));
        matchStatement.addMatchClause(bLangMatchClause);
    }
    matchStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        matchStatement.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return matchStatement;
}
public BLangNode createXmlTemplateLiteral(TemplateExpressionNode expressionNode) {
    // An empty template is treated as XML text; otherwise the first content
    // item's kind decides whether the template is a single structured XML
    // item (comment/PI/element) or a general XML literal.
    SyntaxKind contentKind = expressionNode.content().isEmpty()
            ? SyntaxKind.XML_TEXT
            : expressionNode.content().get(0).kind();
    switch (contentKind) {
        case XML_COMMENT:
        case XML_PI:
        case XML_ELEMENT:
        case XML_EMPTY_ELEMENT:
            return createExpression(expressionNode.content().get(0));
        default:
            return createXMLLiteral(expressionNode);
    }
}
private BLangMatchPattern transformMatchPattern(Node matchPattern) {
    // Dispatches a syntax-level match pattern to its BLang counterpart.
    // Returns null after logging MATCH_PATTERN_NOT_SUPPORTED for unsupported
    // or malformed patterns -- callers must null-check the result.
    Location matchPatternPos = matchPattern.location();
    SyntaxKind kind = matchPattern.kind();

    // A missing simple-name reference means the pattern is malformed.
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
            ((SimpleNameReferenceNode) matchPattern).name().isMissing()) {
        dlog.error(matchPatternPos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
        return null;
    }

    // `_` (either a name reference or a bare identifier token) is the
    // wildcard pattern.
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
            ((SimpleNameReferenceNode) matchPattern).name().text().equals("_")) {
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }
    if (kind == SyntaxKind.IDENTIFIER_TOKEN && ((IdentifierToken) matchPattern).text().equals("_")) {
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }

    // Literals and simple references become constant patterns.
    if (kind == SyntaxKind.NUMERIC_LITERAL ||
            kind == SyntaxKind.STRING_LITERAL ||
            kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            kind == SyntaxKind.IDENTIFIER_TOKEN ||
            kind == SyntaxKind.NULL_LITERAL ||
            kind == SyntaxKind.NIL_LITERAL ||
            kind == SyntaxKind.BOOLEAN_LITERAL) {
        BLangConstPattern bLangConstMatchPattern =
                (BLangConstPattern) TreeBuilder.createConstMatchPattern();
        bLangConstMatchPattern.setExpression(createExpression(matchPattern));
        bLangConstMatchPattern.pos = matchPatternPos;
        return bLangConstMatchPattern;
    }

    // `var <binding-pattern>` -- delegates to the binding-pattern transformer.
    if (kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        TypedBindingPatternNode typedBindingPatternNode = (TypedBindingPatternNode) matchPattern;
        BLangVarBindingPatternMatchPattern bLangVarBindingPattern =
                (BLangVarBindingPatternMatchPattern) TreeBuilder.createVarBindingPattern();
        bLangVarBindingPattern.pos = matchPatternPos;
        bLangVarBindingPattern.setBindingPattern(transformBindingPattern(typedBindingPatternNode.bindingPattern()));
        return bLangVarBindingPattern;
    }

    // Structured patterns delegate to dedicated helpers.
    if (kind == SyntaxKind.ERROR_MATCH_PATTERN) {
        return transformErrorMatchPattern((ErrorMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN) {
        return transformNamedArgMatchPattern((NamedArgMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.LIST_MATCH_PATTERN) {
        return transformListMatchPattern((ListMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.REST_MATCH_PATTERN) {
        return transformRestMatchPattern((RestMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.MAPPING_MATCH_PATTERN) {
        return transformMappingMatchPattern((MappingMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.FIELD_MATCH_PATTERN) {
        return transformFieldMatchPattern((FieldMatchPatternNode) matchPattern, matchPatternPos);
    }

    // Anything else is unsupported.
    dlog.error(matchPatternPos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
    return null;
}
private BLangErrorMatchPattern transformErrorMatchPattern(ErrorMatchPatternNode errorMatchPatternNode,
                                                          Location pos) {
    // error <typeRef>? ( <message>?, <cause>?, <error-fields>... )
    // The arg list is positional: index 0 may be the message pattern or the
    // first error field; index 1 may be the cause or a field; everything
    // after that is error fields.
    BLangErrorMatchPattern bLangErrorMatchPattern =
            (BLangErrorMatchPattern) TreeBuilder.createErrorMatchPattern();
    bLangErrorMatchPattern.pos = pos;
    NameReferenceNode nameReferenceNode;
    if (errorMatchPatternNode.typeReference().isPresent()) {
        nameReferenceNode = errorMatchPatternNode.typeReference().get();
        bLangErrorMatchPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode);
    }
    if (errorMatchPatternNode.argListMatchPatternNode().size() == 0) {
        return bLangErrorMatchPattern;
    }
    // First arg: either the start of the error-field patterns or the message.
    Node node = errorMatchPatternNode.argListMatchPatternNode().get(0);
    if (isErrorFieldMatchPattern(node)) {
        createErrorFieldMatchPatterns(0, errorMatchPatternNode, bLangErrorMatchPattern);
        return bLangErrorMatchPattern;
    }
    bLangErrorMatchPattern.errorMessageMatchPattern = createErrorMessageMatchPattern(node);
    if (errorMatchPatternNode.argListMatchPatternNode().size() == 1) {
        return bLangErrorMatchPattern;
    }
    // Second arg: either error fields or the cause pattern.
    node = errorMatchPatternNode.argListMatchPatternNode().get(1);
    if (isErrorFieldMatchPattern(node)) {
        createErrorFieldMatchPatterns(1, errorMatchPatternNode, bLangErrorMatchPattern);
        return bLangErrorMatchPattern;
    }
    bLangErrorMatchPattern.errorCauseMatchPattern = createErrorCauseMatchPattern(node);
    // Remaining args (from index 2) are error fields.
    createErrorFieldMatchPatterns(2, errorMatchPatternNode, bLangErrorMatchPattern);
    return bLangErrorMatchPattern;
}
private BLangNamedArgMatchPattern transformNamedArgMatchPattern(NamedArgMatchPatternNode namedArgMatchPatternNode,
                                                                Location pos) {
    // name = <pattern> inside an error match-pattern argument list.
    BLangNamedArgMatchPattern namedArgPattern =
            (BLangNamedArgMatchPattern) TreeBuilder.createNamedArgMatchPattern();
    namedArgPattern.pos = pos;
    namedArgPattern.argName = createIdentifier(namedArgMatchPatternNode.identifier());
    namedArgPattern.matchPattern = transformMatchPattern(namedArgMatchPatternNode.matchPattern());
    return namedArgPattern;
}
private BLangListMatchPattern transformListMatchPattern(ListMatchPatternNode listMatchPatternNode,
                                                        Location pos) {
    // Transforms `[p1, p2, ..., restP]` into a BLangListMatchPattern; a
    // trailing rest pattern is stored separately from the member patterns.
    BLangListMatchPattern bLangListMatchPattern =
            (BLangListMatchPattern) TreeBuilder.createListMatchPattern();
    bLangListMatchPattern.pos = pos;
    SeparatedNodeList<Node> matchPatterns = listMatchPatternNode.matchPatterns();
    int matchPatternListSize = matchPatterns.size();
    if (matchPatternListSize == 0) {
        return bLangListMatchPattern;
    }
    for (int i = 0; i < matchPatternListSize - 1; i++) {
        BLangMatchPattern bLangMemberMatchPattern = transformMatchPattern(matchPatterns.get(i));
        if (bLangMemberMatchPattern == null) {
            // Unsupported pattern; diagnostic already logged by transformMatchPattern.
            continue;
        }
        bLangListMatchPattern.addMatchPattern(bLangMemberMatchPattern);
    }
    BLangMatchPattern lastMember = transformMatchPattern(matchPatterns.get(matchPatternListSize - 1));
    if (lastMember == null) {
        // Fix: previously dereferenced null (NPE) when the last member was an
        // unsupported pattern; diagnostic already logged, so just return.
        return bLangListMatchPattern;
    }
    if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
        bLangListMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
    } else {
        bLangListMatchPattern.addMatchPattern(lastMember);
    }
    return bLangListMatchPattern;
}
private BLangRestMatchPattern transformRestMatchPattern(RestMatchPatternNode restMatchPatternNode, Location pos) {
    // ...<name> -- captures the remaining members of a list/mapping pattern.
    BLangRestMatchPattern restMatchPattern = (BLangRestMatchPattern) TreeBuilder.createRestMatchPattern();
    restMatchPattern.pos = pos;
    SimpleNameReferenceNode varName = restMatchPatternNode.variableName();
    restMatchPattern.setIdentifier(createIdentifier(getPosition(varName), varName.name()));
    return restMatchPattern;
}
private BLangMappingMatchPattern transformMappingMatchPattern(MappingMatchPatternNode mappingMatchPatternNode,
                                                              Location pos) {
    // Transforms `{f1: p1, ..., restP}` into a BLangMappingMatchPattern; a
    // trailing rest pattern is stored separately from the field patterns.
    BLangMappingMatchPattern bLangMappingMatchPattern =
            (BLangMappingMatchPattern) TreeBuilder.createMappingMatchPattern();
    bLangMappingMatchPattern.pos = pos;
    SeparatedNodeList<Node> fieldMatchPatterns = mappingMatchPatternNode.fieldMatchPatterns();
    int fieldMatchPatternListSize = fieldMatchPatterns.size();
    if (fieldMatchPatternListSize == 0) {
        return bLangMappingMatchPattern;
    }
    for (int i = 0; i < fieldMatchPatternListSize - 1; i++) {
        BLangFieldMatchPattern fieldPattern =
                (BLangFieldMatchPattern) transformMatchPattern(fieldMatchPatterns.get(i));
        if (fieldPattern == null) {
            // Fix: previously a null (unsupported pattern, diagnostic already
            // logged) was added to the field list; skip it instead.
            continue;
        }
        bLangMappingMatchPattern.fieldMatchPatterns.add(fieldPattern);
    }
    BLangMatchPattern lastMember = transformMatchPattern(fieldMatchPatterns.get(fieldMatchPatternListSize - 1));
    if (lastMember == null) {
        // Fix: previously dereferenced null (NPE) when the last member was an
        // unsupported pattern.
        return bLangMappingMatchPattern;
    }
    if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
        bLangMappingMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
    } else {
        bLangMappingMatchPattern.addFieldMatchPattern((BLangFieldMatchPattern) lastMember);
    }
    return bLangMappingMatchPattern;
}
private BLangFieldMatchPattern transformFieldMatchPattern(FieldMatchPatternNode fieldMatchPatternNode,
                                                          Location pos) {
    // fieldName: <pattern> inside a mapping match pattern.
    BLangFieldMatchPattern fieldMatchPattern =
            (BLangFieldMatchPattern) TreeBuilder.createFieldMatchPattern();
    fieldMatchPattern.pos = pos;
    fieldMatchPattern.fieldName = createIdentifier(fieldMatchPatternNode.fieldNameNode());
    fieldMatchPattern.matchPattern = transformMatchPattern(fieldMatchPatternNode.matchPattern());
    return fieldMatchPattern;
}
private BLangBindingPattern transformBindingPattern(Node bindingPattern) {
    // Dispatches a syntax-level binding pattern to its BLang counterpart.
    // Returns null after logging MATCH_PATTERN_NOT_SUPPORTED for unsupported
    // kinds -- callers must null-check the result.
    Location pos = getPosition(bindingPattern);
    SyntaxKind patternKind = bindingPattern.kind();
    switch (patternKind) {
        case WILDCARD_BINDING_PATTERN:
            return transformWildCardBindingPattern(pos);
        case CAPTURE_BINDING_PATTERN:
            return transformCaptureBindingPattern((CaptureBindingPatternNode) bindingPattern, pos);
        case LIST_BINDING_PATTERN:
            return transformListBindingPattern((ListBindingPatternNode) bindingPattern, pos);
        case NAMED_ARG_BINDING_PATTERN:
            return transformNamedArgBindingPattern((NamedArgBindingPatternNode) bindingPattern, pos);
        case REST_BINDING_PATTERN:
            return transformRestBindingPattern((RestBindingPatternNode) bindingPattern, pos);
        case MAPPING_BINDING_PATTERN:
            return transformMappingBindingPattern((MappingBindingPatternNode) bindingPattern, pos);
        case FIELD_BINDING_PATTERN:
            return transformFieldBindingPattern(bindingPattern, pos);
        case ERROR_BINDING_PATTERN:
            return transformErrorBindingPattern((ErrorBindingPatternNode) bindingPattern, pos);
        default:
            dlog.error(pos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
            return null;
    }
}
private BLangWildCardBindingPattern transformWildCardBindingPattern(Location pos) {
    // `_` -- matches anything without binding a name.
    BLangWildCardBindingPattern wildCardPattern =
            (BLangWildCardBindingPattern) TreeBuilder.createWildCardBindingPattern();
    wildCardPattern.pos = pos;
    return wildCardPattern;
}
private BLangCaptureBindingPattern transformCaptureBindingPattern(CaptureBindingPatternNode captureBindingPattern,
                                                                  Location pos) {
    // Binds the matched value to the given variable name.
    BLangCaptureBindingPattern capturePattern =
            (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
    capturePattern.pos = pos;
    capturePattern.setIdentifier(createIdentifier(captureBindingPattern.variableName()));
    return capturePattern;
}
private BLangRestBindingPattern transformRestBindingPattern(RestBindingPatternNode restBindingPatternNode,
                                                            Location pos) {
    // ...<name> -- binds the remaining members of a list/mapping pattern.
    BLangRestBindingPattern restPattern =
            (BLangRestBindingPattern) TreeBuilder.createRestBindingPattern();
    restPattern.pos = pos;
    SimpleNameReferenceNode varName = restBindingPatternNode.variableName();
    restPattern.setIdentifier(createIdentifier(getPosition(varName), varName.name()));
    return restPattern;
}
private BLangListBindingPattern transformListBindingPattern(ListBindingPatternNode listBindingPatternNode,
                                                            Location pos) {
    // Members go into the list pattern; a rest pattern (`...r`) is stored
    // separately.
    BLangListBindingPattern listPattern =
            (BLangListBindingPattern) TreeBuilder.createListBindingPattern();
    listPattern.pos = pos;
    for (Node member : listBindingPatternNode.bindingPatterns()) {
        if (member.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            listPattern.restBindingPattern = (BLangRestBindingPattern) transformBindingPattern(member);
        } else {
            listPattern.addBindingPattern(transformBindingPattern(member));
        }
    }
    return listPattern;
}
private BLangMappingBindingPattern transformMappingBindingPattern(MappingBindingPatternNode
                                                                          mappingBindingPatternNode,
                                                                  Location pos) {
    // Field patterns go into the mapping pattern; a rest pattern (`...r`) is
    // stored separately.
    BLangMappingBindingPattern mappingPattern =
            (BLangMappingBindingPattern) TreeBuilder.createMappingBindingPattern();
    mappingPattern.pos = pos;
    for (Node member : mappingBindingPatternNode.fieldBindingPatterns()) {
        if (member.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            mappingPattern.restBindingPattern = (BLangRestBindingPattern) transformBindingPattern(member);
        } else {
            mappingPattern.fieldBindingPatterns.add(
                    (BLangFieldBindingPattern) transformBindingPattern(member));
        }
    }
    return mappingPattern;
}
private BLangFieldBindingPattern transformFieldBindingPattern(Node bindingPattern, Location pos) {
    // Handles both the shorthand `{x}` (varname-only) form and the full
    // `{x: pattern}` form of a field binding pattern.
    BLangFieldBindingPattern fieldBindingPattern =
            (BLangFieldBindingPattern) TreeBuilder.createFieldBindingPattern();
    fieldBindingPattern.pos = pos;
    if (bindingPattern instanceof FieldBindingPatternVarnameNode) {
        // Shorthand: the field name doubles as a capture binding pattern.
        FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) bindingPattern;
        BLangIdentifier fieldName = createIdentifier(varnameNode.variableName().name());
        fieldBindingPattern.fieldName = fieldName;
        BLangCaptureBindingPattern capturePattern =
                (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
        capturePattern.setIdentifier(fieldName);
        capturePattern.pos = pos;
        fieldBindingPattern.bindingPattern = capturePattern;
        return fieldBindingPattern;
    }
    FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) bindingPattern;
    fieldBindingPattern.fieldName = createIdentifier(fullNode.variableName().name());
    fieldBindingPattern.bindingPattern = transformBindingPattern(fullNode.bindingPattern());
    return fieldBindingPattern;
}
/**
 * Transforms a named-arg binding pattern ({@code name = pattern}) into its BLang form.
 */
private BLangNamedArgBindingPattern transformNamedArgBindingPattern(NamedArgBindingPatternNode
                                                                            namedArgBindingPattern,
                                                                    Location pos) {
    BLangNamedArgBindingPattern namedArgPattern =
            (BLangNamedArgBindingPattern) TreeBuilder.createNamedArgBindingPattern();
    namedArgPattern.pos = pos;
    namedArgPattern.argName = createIdentifier(namedArgBindingPattern.argName());
    namedArgPattern.bindingPattern = transformBindingPattern(namedArgBindingPattern.bindingPattern());
    return namedArgPattern;
}
/**
 * Transforms an error binding pattern {@code error TypeRef (msg, cause, f1 = ..., ...rest)}
 * into its BLang form. The arg list is positional: index 0 is the message (unless it is
 * already a named-arg/rest "field" pattern), index 1 is the cause, and everything after
 * that is an error field pattern.
 */
private BLangErrorBindingPattern transformErrorBindingPattern(ErrorBindingPatternNode errorBindingPatternNode,
                                                              Location pos) {
    BLangErrorBindingPattern bLangErrorBindingPattern =
            (BLangErrorBindingPattern) TreeBuilder.createErrorBindingPattern();
    bLangErrorBindingPattern.pos = pos;
    if (errorBindingPatternNode.typeReference().isPresent()) {
        Node nameReferenceNode = errorBindingPatternNode.typeReference().get();
        bLangErrorBindingPattern.errorTypeReference =
                (BLangUserDefinedType) createTypeNode(nameReferenceNode);
    }
    if (errorBindingPatternNode.argListBindingPatterns().size() == 0) {
        return bLangErrorBindingPattern;
    }
    // Arg 0: either the start of the field patterns (named-arg/rest) or the error message.
    Node node = errorBindingPatternNode.argListBindingPatterns().get(0);
    if (isErrorFieldBindingPattern(node)) {
        createErrorFieldBindingPatterns(0, errorBindingPatternNode, bLangErrorBindingPattern);
        return bLangErrorBindingPattern;
    }
    bLangErrorBindingPattern.errorMessageBindingPattern = createErrorMessageBindingPattern(node);
    if (errorBindingPatternNode.argListBindingPatterns().size() == 1) {
        return bLangErrorBindingPattern;
    }
    // Arg 1: either the start of the field patterns or the error cause.
    node = errorBindingPatternNode.argListBindingPatterns().get(1);
    if (isErrorFieldBindingPattern(node)) {
        createErrorFieldBindingPatterns(1, errorBindingPatternNode, bLangErrorBindingPattern);
        return bLangErrorBindingPattern;
    }
    bLangErrorBindingPattern.errorCauseBindingPattern = createErrorCauseBindingPattern(node);
    // Args 2..n are always field patterns.
    createErrorFieldBindingPatterns(2, errorBindingPatternNode, bLangErrorBindingPattern);
    return bLangErrorBindingPattern;
}
/** Returns true if {@code node} is an error "field" match pattern (named-arg or rest form). */
private boolean isErrorFieldMatchPattern(Node node) {
    SyntaxKind kind = node.kind();
    return kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN || kind == SyntaxKind.REST_MATCH_PATTERN;
}
/** Returns true if {@code node} is an error "field" binding pattern (named-arg or rest form). */
private boolean isErrorFieldBindingPattern(Node node) {
    SyntaxKind kind = node.kind();
    return kind == SyntaxKind.NAMED_ARG_BINDING_PATTERN || kind == SyntaxKind.REST_BINDING_PATTERN;
}
/** Wraps the transformed match pattern for an error's message argument. */
private BLangErrorMessageMatchPattern createErrorMessageMatchPattern(Node node) {
    BLangErrorMessageMatchPattern messagePattern =
            (BLangErrorMessageMatchPattern) TreeBuilder.createErrorMessageMatchPattern();
    messagePattern.pos = getPosition(node);
    BLangMatchPattern transformed = transformMatchPattern(node);
    messagePattern.simpleMatchPattern = createSimpleMatchPattern(transformed);
    return messagePattern;
}
/** Wraps the transformed binding pattern for an error's message argument. */
private BLangErrorMessageBindingPattern createErrorMessageBindingPattern(Node node) {
    BLangErrorMessageBindingPattern messagePattern =
            (BLangErrorMessageBindingPattern) TreeBuilder.createErrorMessageBindingPattern();
    messagePattern.pos = getPosition(node);
    BLangBindingPattern transformed = transformBindingPattern(node);
    messagePattern.simpleBindingPattern = createSimpleBindingPattern(transformed);
    return messagePattern;
}
/**
 * Wraps the transformed match pattern for an error's cause argument; a nested
 * error match pattern is stored as-is, everything else as a simple match pattern.
 */
private BLangErrorCauseMatchPattern createErrorCauseMatchPattern(Node node) {
    BLangErrorCauseMatchPattern causePattern =
            (BLangErrorCauseMatchPattern) TreeBuilder.createErrorCauseMatchPattern();
    causePattern.pos = getPosition(node);
    BLangMatchPattern transformed = transformMatchPattern(node);
    if (transformed.getKind() == NodeKind.ERROR_MATCH_PATTERN) {
        causePattern.errorMatchPattern = (BLangErrorMatchPattern) transformed;
    } else {
        causePattern.simpleMatchPattern = createSimpleMatchPattern(transformed);
    }
    return causePattern;
}
/**
 * Wraps the transformed binding pattern for an error's cause argument; a nested
 * error binding pattern is stored as-is, everything else as a simple binding pattern.
 */
private BLangErrorCauseBindingPattern createErrorCauseBindingPattern(Node node) {
    BLangErrorCauseBindingPattern causePattern =
            (BLangErrorCauseBindingPattern) TreeBuilder.createErrorCauseBindingPattern();
    causePattern.pos = getPosition(node);
    BLangBindingPattern transformed = transformBindingPattern(node);
    if (transformed.getKind() == NodeKind.ERROR_BINDING_PATTERN) {
        causePattern.errorBindingPattern = (BLangErrorBindingPattern) transformed;
    } else {
        causePattern.simpleBindingPattern = createSimpleBindingPattern(transformed);
    }
    return causePattern;
}
/**
 * Folds one parsed arg-list entry into the accumulated error field match patterns
 * and returns the (mutated) accumulator.
 */
private BLangErrorFieldMatchPatterns createErrorFieldMatchPattern(Node errorFieldMatchPatternNode,
                                                                  BLangErrorFieldMatchPatterns
                                                                          bLangErrorFieldMatchPatterns) {
    bLangErrorFieldMatchPatterns.pos = getPosition(errorFieldMatchPatternNode);
    BLangMatchPattern transformed = transformMatchPattern(errorFieldMatchPatternNode);
    switch (transformed.getKind()) {
        case NAMED_ARG_MATCH_PATTERN:
            bLangErrorFieldMatchPatterns.addNamedArgMatchPattern(
                    (org.ballerinalang.model.tree.matchpatterns.NamedArgMatchPatternNode) transformed);
            break;
        case REST_MATCH_PATTERN:
            bLangErrorFieldMatchPatterns.restMatchPattern = (BLangRestMatchPattern) transformed;
            break;
        default:
            break;
    }
    return bLangErrorFieldMatchPatterns;
}
/**
 * Folds one parsed arg-list entry into the accumulated error field binding patterns
 * and returns the (mutated) accumulator.
 */
private BLangErrorFieldBindingPatterns createErrorFieldBindingPattern(Node errorFieldBindingPatternNode,
                                                                      BLangErrorFieldBindingPatterns
                                                                              bLangErrorFieldBindingPatterns) {
    bLangErrorFieldBindingPatterns.pos = getPosition(errorFieldBindingPatternNode);
    BLangBindingPattern transformed = transformBindingPattern(errorFieldBindingPatternNode);
    switch (transformed.getKind()) {
        case NAMED_ARG_BINDING_PATTERN:
            bLangErrorFieldBindingPatterns.addNamedArgBindingPattern(
                    (org.ballerinalang.model.tree.bindingpattern.NamedArgBindingPatternNode) transformed);
            break;
        case REST_BINDING_PATTERN:
            bLangErrorFieldBindingPatterns.restBindingPattern = (BLangRestBindingPattern) transformed;
            break;
        default:
            break;
    }
    return bLangErrorFieldBindingPatterns;
}
/**
 * Transforms the error match pattern's arg-list entries from {@code index} onward into a
 * single BLangErrorFieldMatchPatterns and attaches it to {@code bLangErrorMatchPattern}.
 */
private void createErrorFieldMatchPatterns(int index, ErrorMatchPatternNode errorMatchPatternNode,
                                           BLangErrorMatchPattern bLangErrorMatchPattern) {
    BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns =
            (BLangErrorFieldMatchPatterns) TreeBuilder.createErrorFieldMatchPattern();
    for (int i = index; i < errorMatchPatternNode.argListMatchPatternNode().size(); i++) {
        Node errorFieldMatchPatternNode = errorMatchPatternNode.argListMatchPatternNode().get(i);
        // The helper mutates and returns the same accumulator, so reassigning each
        // iteration is harmless: the field always points at the one accumulator above.
        bLangErrorMatchPattern.errorFieldMatchPatterns = createErrorFieldMatchPattern(errorFieldMatchPatternNode,
                bLangErrorFieldMatchPatterns);
    }
}
/**
 * Transforms the error binding pattern's arg-list entries from {@code index} onward into a
 * single BLangErrorFieldBindingPatterns and attaches it to {@code bLangErrorBindingPattern}.
 */
private void createErrorFieldBindingPatterns(int index, ErrorBindingPatternNode errorBindingPatternNode,
                                             BLangErrorBindingPattern bLangErrorBindingPattern) {
    BLangErrorFieldBindingPatterns bLangErrorFieldBindingPatterns =
            (BLangErrorFieldBindingPatterns) TreeBuilder.createErrorFieldBindingPattern();
    for (int i = index; i < errorBindingPatternNode.argListBindingPatterns().size(); i++) {
        Node errorFieldBindingPatternNode = errorBindingPatternNode.argListBindingPatterns().get(i);
        // The helper mutates and returns the same accumulator created above, so each
        // iteration just folds one more entry into it.
        bLangErrorBindingPattern.errorFieldBindingPatterns =
                createErrorFieldBindingPattern(errorFieldBindingPatternNode, bLangErrorFieldBindingPatterns);
    }
}
/**
 * Wraps a wildcard, const, or var-binding match pattern node in a BLangSimpleMatchPattern.
 * Other node kinds yield an empty wrapper.
 */
private BLangSimpleMatchPattern createSimpleMatchPattern(BLangNode bLangNode) {
    BLangSimpleMatchPattern simplePattern =
            (BLangSimpleMatchPattern) TreeBuilder.createSimpleMatchPattern();
    NodeKind kind = bLangNode.getKind();
    if (kind == NodeKind.WILDCARD_MATCH_PATTERN) {
        simplePattern.wildCardMatchPattern = (BLangWildCardMatchPattern) bLangNode;
    } else if (kind == NodeKind.CONST_MATCH_PATTERN) {
        simplePattern.constPattern = (BLangConstPattern) bLangNode;
    } else if (kind == NodeKind.VAR_BINDING_PATTERN_MATCH_PATTERN) {
        simplePattern.varVariableName = (BLangVarBindingPatternMatchPattern) bLangNode;
    }
    return simplePattern;
}
/** Creates a BLang capture binding pattern carrying the captured variable name. */
private BLangCaptureBindingPattern createCaptureBindingPattern(CaptureBindingPatternNode
                                                                       captureBindingPatternNode) {
    BLangCaptureBindingPattern capturePattern =
            (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
    capturePattern.pos = getPosition(captureBindingPatternNode);
    capturePattern.setIdentifier(createIdentifier(captureBindingPatternNode.variableName()));
    return capturePattern;
}
/**
 * Wraps a wildcard or capture binding pattern node in a BLangSimpleBindingPattern.
 * Other node kinds yield an empty wrapper.
 */
private BLangSimpleBindingPattern createSimpleBindingPattern(BLangNode bLangNode) {
    BLangSimpleBindingPattern simplePattern =
            (BLangSimpleBindingPattern) TreeBuilder.createSimpleBindingPattern();
    NodeKind kind = bLangNode.getKind();
    if (kind == NodeKind.WILDCARD_BINDING_PATTERN) {
        simplePattern.wildCardBindingPattern = (BLangWildCardBindingPattern) bLangNode;
    } else if (kind == NodeKind.CAPTURE_BINDING_PATTERN) {
        simplePattern.captureBindingPattern = (BLangCaptureBindingPattern) bLangNode;
    }
    return simplePattern;
}
/**
 * Builds an XML element filter from a name-pattern node. Defaults: no namespace prefix
 * ({@code ""}) and the wildcard element name ({@code "*"}); the switch below overrides
 * whichever parts the node supplies. Leading single quotes (quoted identifiers) are
 * stripped from both the prefix and the element name.
 */
private BLangXMLElementFilter createXMLElementFilter(Node node) {
    String ns = "";
    String elementName = "*";
    Location nsPos = null;
    Location elemNamePos = null;
    SyntaxKind kind = node.kind();
    switch (kind) {
        case SIMPLE_NAME_REFERENCE:
            // Bare element name, no prefix.
            SimpleNameReferenceNode simpleNameReferenceNode = (SimpleNameReferenceNode) node;
            elementName = simpleNameReferenceNode.name().text();
            elemNamePos = getPosition(simpleNameReferenceNode);
            break;
        case QUALIFIED_NAME_REFERENCE:
            // prefix:name form.
            QualifiedNameReferenceNode qualifiedNameReferenceNode = (QualifiedNameReferenceNode) node;
            elementName = qualifiedNameReferenceNode.identifier().text();
            elemNamePos = getPosition(qualifiedNameReferenceNode.identifier());
            ns = qualifiedNameReferenceNode.modulePrefix().text();
            nsPos = getPosition(qualifiedNameReferenceNode.modulePrefix());
            break;
        case XML_ATOMIC_NAME_PATTERN:
            XMLAtomicNamePatternNode atomicNamePatternNode = (XMLAtomicNamePatternNode) node;
            elementName = atomicNamePatternNode.name().text();
            elemNamePos = getPosition(atomicNamePatternNode.name());
            ns = atomicNamePatternNode.prefix().text();
            nsPos = getPosition(atomicNamePatternNode.prefix());
            break;
        case ASTERISK_TOKEN:
            // Wildcard `*`: keep the "*" default, only record the position.
            // (No break needed: this is the last case.)
            elemNamePos = getPosition(node);
    }
    if (stringStartsWithSingleQuote(ns)) {
        ns = ns.substring(1);
    }
    if (stringStartsWithSingleQuote(elementName)) {
        elementName = elementName.substring(1);
    }
    return new BLangXMLElementFilter(getPosition(node), null, ns, nsPos, elementName, elemNamePos);
}
/** Returns true if {@code ns} is non-null, non-empty, and begins with a single quote. */
private boolean stringStartsWithSingleQuote(String ns) {
    return ns != null && !ns.isEmpty() && ns.charAt(0) == '\'';
}
/**
 * Reconstructs the source text of a byte-array literal, e.g. {@code base16 `AB12`};
 * a literal with no content yields just the type keyword and empty backticks.
 */
private String getValueFromByteArrayNode(ByteArrayLiteralNode byteArrayLiteralNode) {
    StringBuilder value = new StringBuilder(byteArrayLiteralNode.type().text());
    value.append(" ").append("`");
    byteArrayLiteralNode.content().ifPresent(content -> value.append(content.text()));
    return value.append("`").toString();
}
/**
 * Converts a mapping binding pattern into a BLangRecordVariable, pairing each field
 * name with its value binding pattern. A rest binding pattern becomes the record
 * variable's rest param and stops the loop (it is syntactically the last entry).
 */
private BLangRecordVariable createBLangRecordVariable(MappingBindingPatternNode mappingBindingPatternNode) {
    BLangRecordVariable recordVariable = (BLangRecordVariable) TreeBuilder.createRecordVariableNode();
    List<BLangRecordVariableKeyValue> fieldBindingPatternsList = new ArrayList<>();
    for (BindingPatternNode node : mappingBindingPatternNode.fieldBindingPatterns()) {
        BLangRecordVariableKeyValue recordKeyValue = new BLangRecordVariableKeyValue();
        if (node instanceof FieldBindingPatternFullNode) {
            // `{name: pattern}` form.
            FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) node;
            recordKeyValue.key = createIdentifier(fullNode.variableName().name());
            recordKeyValue.valueBindingPattern = getBLangVariableNode(fullNode.bindingPattern());
        } else if (node instanceof FieldBindingPatternVarnameNode) {
            // `{name}` shorthand: the field name is also the bound simple variable.
            FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) node;
            recordKeyValue.key = createIdentifier(varnameNode.variableName().name());
            BLangSimpleVariable value = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
            value.pos = getPosition(varnameNode);
            IdentifierNode name = createIdentifier(varnameNode.variableName().name());
            ((BLangIdentifier) name).pos = value.pos;
            value.setName(name);
            recordKeyValue.valueBindingPattern = value;
        } else {
            // Rest binding pattern: record it and stop; nothing may follow it.
            recordVariable.restParam = getBLangVariableNode(node);
            break;
        }
        fieldBindingPatternsList.add(recordKeyValue);
    }
    recordVariable.variableList = fieldBindingPatternsList;
    recordVariable.pos = getPosition(mappingBindingPatternNode);
    return recordVariable;
}
/** Creates an empty-string BLang literal (no position set by this helper). */
private BLangLiteral createEmptyLiteral() {
    BLangLiteral emptyLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    emptyLiteral.type = symTable.getTypeFromTag(TypeTags.STRING);
    emptyLiteral.value = "";
    emptyLiteral.originalValue = "";
    return emptyLiteral;
}
/** Creates a simple variable named after {@code identifier} at the given locations. */
private BLangVariable createSimpleVariable(Location location,
                                           Token identifier,
                                           Location identifierPos) {
    BLangSimpleVariable variable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    variable.pos = location;
    IdentifierNode varName = createIdentifier(identifierPos, identifier);
    ((BLangIdentifier) varName).pos = identifierPos;
    variable.setName(varName);
    return variable;
}
/**
 * Converts a binding pattern into the corresponding BLangVariable: record variable for
 * mapping patterns, tuple variable for list patterns, error variable for error patterns,
 * and a simple variable for rest/wildcard/capture patterns.
 */
private BLangVariable getBLangVariableNode(BindingPatternNode bindingPattern) {
    Token varName;
    switch (bindingPattern.kind()) {
        case MAPPING_BINDING_PATTERN:
            MappingBindingPatternNode mappingBindingPatternNode = (MappingBindingPatternNode) bindingPattern;
            return createBLangRecordVariable(mappingBindingPatternNode);
        case LIST_BINDING_PATTERN:
            ListBindingPatternNode listBindingPatternNode = (ListBindingPatternNode) bindingPattern;
            BLangTupleVariable tupleVariable = (BLangTupleVariable) TreeBuilder.createTupleVariableNode();
            tupleVariable.pos = getPosition(listBindingPatternNode);
            for (BindingPatternNode memberBindingPattern : listBindingPatternNode.bindingPatterns()) {
                if (memberBindingPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
                    tupleVariable.restVariable = getBLangVariableNode(memberBindingPattern);
                } else {
                    BLangVariable member = getBLangVariableNode(memberBindingPattern);
                    tupleVariable.memberVariables.add(member);
                }
            }
            return tupleVariable;
        case ERROR_BINDING_PATTERN:
            ErrorBindingPatternNode errorBindingPatternNode = (ErrorBindingPatternNode) bindingPattern;
            BLangErrorVariable bLangErrorVariable = (BLangErrorVariable) TreeBuilder.createErrorVariableNode();
            bLangErrorVariable.pos = getPosition(errorBindingPatternNode);
            Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
            if (errorTypeRef.isPresent()) {
                bLangErrorVariable.typeNode = createTypeNode(errorTypeRef.get());
            }
            SeparatedNodeList<BindingPatternNode> argListBindingPatterns =
                    errorBindingPatternNode.argListBindingPatterns();
            int numberOfArgs = argListBindingPatterns.size();
            List<BLangErrorVariable.BLangErrorDetailEntry> namedArgs = new ArrayList<>();
            for (int position = 0; position < numberOfArgs; position++) {
                BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
                switch (bindingPatternNode.kind()) {
                    case CAPTURE_BINDING_PATTERN:
                    case WILDCARD_BINDING_PATTERN:
                        // Position 0 is the error message; a capture/wildcard at any
                        // later position deliberately falls through to the cause case.
                        if (position == 0) {
                            bLangErrorVariable.message =
                                    (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                            break;
                        }
                        // fall through
                    case ERROR_BINDING_PATTERN:
                        bLangErrorVariable.cause = getBLangVariableNode(bindingPatternNode);
                        break;
                    case NAMED_ARG_BINDING_PATTERN:
                        // `name = pattern` entries collect into the error detail list.
                        NamedArgBindingPatternNode namedArgBindingPatternNode =
                                (NamedArgBindingPatternNode) bindingPatternNode;
                        BLangIdentifier key =
                                createIdentifier(namedArgBindingPatternNode.argName());
                        BLangVariable valueBindingPattern =
                                getBLangVariableNode(namedArgBindingPatternNode.bindingPattern());
                        BLangErrorVariable.BLangErrorDetailEntry detailEntry =
                                new BLangErrorVariable.BLangErrorDetailEntry(key, valueBindingPattern);
                        namedArgs.add(detailEntry);
                        break;
                    default:
                        // Anything else (rest binding pattern) becomes the rest detail.
                        bLangErrorVariable.restDetail =
                                (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                }
            }
            bLangErrorVariable.detail = namedArgs;
            return bLangErrorVariable;
        case REST_BINDING_PATTERN:
            RestBindingPatternNode restBindingPatternNode = (RestBindingPatternNode) bindingPattern;
            varName = restBindingPatternNode.variableName().name();
            break;
        case WILDCARD_BINDING_PATTERN:
            // `_` binds to the underscore token itself.
            WildcardBindingPatternNode wildcardBindingPatternNode = (WildcardBindingPatternNode) bindingPattern;
            varName = wildcardBindingPatternNode.underscoreToken();
            break;
        case CAPTURE_BINDING_PATTERN:
        default:
            CaptureBindingPatternNode captureBindingPatternNode = (CaptureBindingPatternNode) bindingPattern;
            varName = captureBindingPatternNode.variableName();
            break;
    }
    // Rest/wildcard/capture patterns all reduce to a simple variable.
    Location pos = getPosition(bindingPattern);
    return createSimpleVariable(pos, varName, getPosition(varName));
}
/** Creates a BLang value-type node of the given kind at the given position. */
BLangValueType addValueType(Location pos, TypeKind typeKind) {
    BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
    valueType.typeKind = typeKind;
    valueType.pos = pos;
    return valueType;
}
/** Transforms the given statement nodes into a fresh list of BLang statements. */
private List<BLangStatement> generateBLangStatements(NodeList<StatementNode> statementNodes) {
    return generateAndAddBLangStatements(statementNodes, new ArrayList<>());
}
/**
 * Transforms each statement node and appends the result to {@code statements};
 * null entries are skipped. Returns the same list for chaining.
 */
private List<BLangStatement> generateAndAddBLangStatements(NodeList<StatementNode> statementNodes,
                                                           List<BLangStatement> statements) {
    for (StatementNode statement : statementNodes) {
        if (statement == null) {
            continue;
        }
        if (statement.kind() == SyntaxKind.FORK_STATEMENT) {
            // A fork statement expands into several statements, so it gets special handling.
            generateForkStatements(statements, (ForkStatementNode) statement);
        } else {
            statements.add((BLangStatement) statement.apply(this));
        }
    }
    return statements;
}
/** Joins the version-number tokens with '.' separators, e.g. [1, 0, 2] -> "1.0.2". */
private String extractVersion(SeparatedNodeList<Token> versionNumbers) {
    int size = versionNumbers.size();
    if (size == 0) {
        return "";
    }
    StringBuilder version = new StringBuilder(versionNumbers.get(0).text());
    for (int i = 1; i < size; i++) {
        version.append(".").append(versionNumbers.get(i).text());
    }
    return version.toString();
}
/**
 * Expands a fork statement into the output statement list: one worker-variable
 * definition per named worker (plus any statements the worker transformation queued
 * on {@code additionalStatements}), followed by the fork-join node itself.
 */
private void generateForkStatements(List<BLangStatement> statements, ForkStatementNode forkStatementNode) {
    BLangForkJoin forkJoin = (BLangForkJoin) forkStatementNode.apply(this);
    // All workers of this fork share one anonymous fork name.
    String nextAnonymousForkKey = anonymousModelHelper.getNextAnonymousForkKey(packageID);
    for (NamedWorkerDeclarationNode workerDeclarationNode : forkStatementNode.namedWorkerDeclarations()) {
        BLangSimpleVariableDef workerDef = (BLangSimpleVariableDef) workerDeclarationNode.apply(this);
        workerDef.isWorker = true;
        workerDef.isInFork = true;
        workerDef.var.flagSet.add(Flag.FORKED);
        // The worker body is a lambda; mark its function as part of this fork.
        BLangFunction function = ((BLangLambdaFunction) workerDef.var.expr).function;
        function.addFlag(Flag.FORKED);
        function.anonForkName = nextAnonymousForkKey;
        statements.add(workerDef);
        // Drain any statements queued as a side effect of transforming the worker.
        while (!this.additionalStatements.empty()) {
            statements.add(additionalStatements.pop());
        }
        forkJoin.addWorkers(workerDef);
    }
    statements.add(forkJoin);
}
/** Wraps {@code expr} in a `check` expression at the given position. */
private BLangCheckedExpr createCheckExpr(Location pos, BLangExpression expr) {
    BLangCheckedExpr checked = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
    checked.expr = expr;
    checked.pos = pos;
    return checked;
}
/** Wraps {@code expr} in a `checkpanic` expression at the given position. */
private BLangCheckPanickedExpr createCheckPanickedExpr(Location pos, BLangExpression expr) {
    BLangCheckPanickedExpr checkPanicked =
            (BLangCheckPanickedExpr) TreeBuilder.createCheckPanicExpressionNode();
    checkPanicked.expr = expr;
    checkPanicked.pos = pos;
    return checkPanicked;
}
/**
 * Populates {@code bLFunction}'s parameters and return type from the parsed signature.
 * A missing return-type descriptor defaults to nil (`()`).
 */
private void populateFuncSignature(BLangFunction bLFunction, FunctionSignatureNode funcSignature) {
    for (ParameterNode child : funcSignature.parameters()) {
        SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
        if (child instanceof RestParameterNode) {
            bLFunction.setRestParameter(param);
        } else {
            bLFunction.addParameter(param);
        }
    }
    Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
    if (retNode.isPresent()) {
        ReturnTypeDescriptorNode returnType = retNode.get();
        bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
        // Annotations on the return type attach to the function, not the type node.
        bLFunction.returnTypeAnnAttachments = applyAll(returnType.annotations());
    } else {
        // No declared return type: synthesize nil at the builtin position.
        BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
        bLValueType.pos = symTable.builtinPos;
        bLValueType.typeKind = TypeKind.NIL;
        bLFunction.setReturnTypeNode(bLValueType);
    }
}
/** Creates a unary expression applying {@code operatorKind} to {@code expr}. */
private BLangUnaryExpr createBLangUnaryExpr(Location location,
                                            OperatorKind operatorKind,
                                            BLangExpression expr) {
    BLangUnaryExpr unaryExpr = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
    unaryExpr.expr = expr;
    unaryExpr.operator = operatorKind;
    unaryExpr.pos = location;
    return unaryExpr;
}
/**
 * Transforms a node into a BLang expression. An async send action is not a valid
 * expression: report the error and substitute a missing-identifier reference so
 * transformation can continue.
 */
private BLangExpression createExpression(Node expression) {
    if (expression.kind() == SyntaxKind.ASYNC_SEND_ACTION) {
        dlog.error(getPosition(expression), DiagnosticErrorCode.ASYNC_SEND_NOT_YET_SUPPORTED_AS_EXPRESSION);
        Token missingIdentifier = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
        expression = NodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    }
    return (BLangExpression) createActionOrExpression(expression);
}
/**
 * Transforms a node into the appropriate BLang action/expression node: simple literals,
 * name references (as variable refs), braced expressions (as group expressions), type
 * nodes (as typedesc access), or — as the general case — whatever the visitor produces.
 */
private BLangNode createActionOrExpression(Node actionOrExpression) {
    if (isSimpleLiteral(actionOrExpression.kind())) {
        return createSimpleLiteral(actionOrExpression);
    } else if (actionOrExpression.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            actionOrExpression.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
            actionOrExpression.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
        // Name references become simple variable references (possibly module-qualified).
        BLangNameReference nameReference = createBLangNameReference(actionOrExpression);
        BLangSimpleVarRef bLVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
        bLVarRef.pos = getPosition(actionOrExpression);
        bLVarRef.pkgAlias = this.createIdentifier((Location) nameReference.pkgAlias.getPosition(),
                nameReference.pkgAlias.getValue());
        bLVarRef.variableName = this.createIdentifier((Location) nameReference.name.getPosition(),
                nameReference.name.getValue());
        return bLVarRef;
    } else if (actionOrExpression.kind() == SyntaxKind.BRACED_EXPRESSION) {
        // `(expr)` keeps an explicit group node around the inner expression.
        BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        group.expression = (BLangExpression) actionOrExpression.apply(this);
        group.pos = getPosition(actionOrExpression);
        return group;
    } else if (isType(actionOrExpression.kind())) {
        // A type used in expression position becomes a typedesc access expression.
        BLangTypedescExpr typeAccessExpr = (BLangTypedescExpr) TreeBuilder.createTypeAccessNode();
        typeAccessExpr.pos = getPosition(actionOrExpression);
        typeAccessExpr.typeNode = createTypeNode(actionOrExpression);
        return typeAccessExpr;
    } else {
        return actionOrExpression.apply(this);
    }
}
/**
 * Builds a string-template literal from its member nodes. An empty template still
 * carries a single empty-string expression so downstream code sees at least one part.
 */
private BLangNode createStringTemplateLiteral(NodeList<Node> memberNodes, Location location) {
    BLangStringTemplateLiteral templateLiteral =
            (BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
    templateLiteral.pos = location;
    for (Node member : memberNodes) {
        templateLiteral.exprs.add((BLangExpression) member.apply(this));
    }
    if (templateLiteral.exprs.isEmpty()) {
        BLangLiteral empty = createEmptyLiteral();
        empty.pos = location;
        templateLiteral.exprs.add(empty);
    }
    return templateLiteral;
}
/**
 * Builds a raw-template literal, maintaining the invariant that string parts and
 * interpolations strictly alternate and that the strings list always brackets the
 * insertions (one more string than insertion): empty strings are synthesized before a
 * leading interpolation, between adjacent interpolations, and after a trailing one.
 */
private BLangRawTemplateLiteral createRawTemplateLiteral(NodeList<Node> members, Location location) {
    BLangRawTemplateLiteral literal = (BLangRawTemplateLiteral) TreeBuilder.createRawTemplateLiteralNode();
    literal.pos = location;
    boolean prevNodeWasInterpolation = false;
    Node firstMember = members.isEmpty() ? null : members.get(0);
    // Template starts with an interpolation: pad with a leading empty string.
    if (firstMember != null && firstMember.kind() == SyntaxKind.INTERPOLATION) {
        literal.strings.add(createStringLiteral("", getPosition(firstMember)));
    }
    for (Node member : members) {
        if (member.kind() == SyntaxKind.INTERPOLATION) {
            literal.insertions.add((BLangExpression) member.apply(this));
            // Two interpolations back-to-back: pad with an empty string between them.
            if (prevNodeWasInterpolation) {
                literal.strings.add(createStringLiteral("", getPosition(member)));
            }
            prevNodeWasInterpolation = true;
        } else {
            literal.strings.add((BLangLiteral) member.apply(this));
            prevNodeWasInterpolation = false;
        }
    }
    // Template ends with an interpolation: pad with a trailing empty string.
    if (prevNodeWasInterpolation) {
        literal.strings.add(createStringLiteral("", getPosition(members.get(members.size() - 1))));
    }
    return literal;
}
/** Creates a simple variable, tolerating an absent name token (treated as null). */
private BLangSimpleVariable createSimpleVar(Optional<Token> name, Node type, NodeList<AnnotationNode> annotations) {
    return createSimpleVar(name.orElse(null), type, null, null, annotations);
}
/** Convenience overload: no initializer and no visibility qualifier. */
private BLangSimpleVariable createSimpleVar(Token name, Node type, NodeList<AnnotationNode> annotations) {
    return createSimpleVar(name, type, null, null, annotations);
}
/**
 * Creates a simple variable with the given name, type (or `var`), optional initializer,
 * optional visibility qualifier, and optional annotations.
 */
private BLangSimpleVariable createSimpleVar(Token name, Node typeName, Node initializer,
                                            Token visibilityQualifier, NodeList<AnnotationNode> annotations) {
    BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    bLSimpleVar.setName(this.createIdentifier(name));
    bLSimpleVar.name.pos = getPosition(name);
    if (isDeclaredWithVar(typeName)) {
        // `var` (or a null type node) means the type is inferred; no type node is set.
        bLSimpleVar.isDeclaredWithVar = true;
    } else {
        bLSimpleVar.setTypeNode(createTypeNode(typeName));
    }
    if (visibilityQualifier != null) {
        if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
            bLSimpleVar.flagSet.add(Flag.PRIVATE);
        } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            bLSimpleVar.flagSet.add(Flag.PUBLIC);
        }
    }
    if (initializer != null) {
        bLSimpleVar.setInitialExpression(createExpression(initializer));
    }
    if (annotations != null) {
        bLSimpleVar.annAttachments = applyAll(annotations);
    }
    return bLSimpleVar;
}
/** Returns true when the type node is absent or is the `var` type descriptor. */
private boolean isDeclaredWithVar(Node typeNode) {
    return typeNode == null || typeNode.kind() == SyntaxKind.VAR_TYPE_DESC;
}
/** Convenience overload: the identifier's position is taken from the token itself. */
private BLangIdentifier createIdentifier(Token token) {
    return createIdentifier(getPosition(token), token);
}
/**
 * Creates an identifier from a token. A null token yields an empty identifier; a
 * missing token gets a synthesized placeholder name so compilation can proceed.
 */
private BLangIdentifier createIdentifier(Location pos, Token token) {
    if (token == null) {
        return createIdentifier(pos, null, null);
    }
    String identifierName = token.isMissing()
            ? missingNodesHelper.getNextMissingNodeName(packageID)
            : token.text();
    return createIdentifier(pos, identifierName);
}
/** Convenience overload with no whitespace set. */
private BLangIdentifier createIdentifier(Location pos, String value) {
    return createIdentifier(pos, value, null);
}
/**
 * Creates an identifier from a raw text value. Quoted identifiers (leading ') drop the
 * quote prefix, keep the original text, and are marked as literal; unicode escape
 * codepoints are unescaped in both cases. A null value yields an empty identifier.
 */
private BLangIdentifier createIdentifier(Location pos, String value, Set<Whitespace> ws) {
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    if (value == null) {
        return identifier;
    }
    boolean isQuoted = value.startsWith(IDENTIFIER_LITERAL_PREFIX);
    if (isQuoted) {
        identifier.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value.substring(1)));
        identifier.originalValue = value;
    } else {
        identifier.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value));
    }
    identifier.setLiteral(isQuoted);
    identifier.pos = pos;
    if (ws != null) {
        identifier.addWS(ws);
    }
    return identifier;
}
/** Creates an empty-string literal positioned at {@code pos}. */
private BLangLiteral createEmptyStringLiteral(Location pos) {
    BLangLiteral emptyString = (BLangLiteral) TreeBuilder.createLiteralExpression();
    emptyString.value = "";
    emptyString.originalValue = "";
    emptyString.type = symTable.stringType;
    emptyString.pos = pos;
    return emptyString;
}
/** Convenience overload: the literal is not part of a finite type. */
private BLangLiteral createSimpleLiteral(Node literal) {
    return createSimpleLiteral(literal, false);
}
/**
 * Creates a literal; a unary expression (e.g. -1) is unwrapped so that its sign is
 * folded into the literal value, with the position covering the whole unary expression.
 */
private BLangLiteral createSimpleLiteral(Node literal, boolean isFiniteType) {
    if (literal.kind() == SyntaxKind.UNARY_EXPRESSION) {
        UnaryExpressionNode unaryExpr = (UnaryExpressionNode) literal;
        BLangLiteral bLangLiteral =
                createSimpleLiteral(unaryExpr.expression(), unaryExpr.unaryOperator().kind(), isFiniteType);
        bLangLiteral.pos = getPosition(unaryExpr);
        return bLangLiteral;
    }
    return createSimpleLiteral(literal, SyntaxKind.NONE, isFiniteType);
}
/**
 * Creates a BLang literal from a parsed literal node, folding in an optional sign token
 * and classifying the value into the matching type tag (int/byte/float/decimal/boolean/
 * string/nil/byte-array). String-like literals additionally get unicode-escape
 * validation and normalization.
 */
private BLangLiteral createSimpleLiteral(Node literal, SyntaxKind sign, boolean isFiniteType) {
    BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    SyntaxKind type = literal.kind();
    int typeTag = -1;
    Object value = null;
    String originalValue = null;
    String textValue;
    // Extract the raw token text; non-literal, non-token nodes contribute no text.
    if (literal instanceof BasicLiteralNode) {
        textValue = ((BasicLiteralNode) literal).literalToken().text();
    } else if (literal instanceof Token) {
        textValue = ((Token) literal).text();
    } else {
        textValue = "";
    }
    // Fold a sign from an enclosing unary expression into the literal's text.
    if (sign == SyntaxKind.PLUS_TOKEN) {
        textValue = "+" + textValue;
    } else if (sign == SyntaxKind.MINUS_TOKEN) {
        textValue = "-" + textValue;
    }
    if (type == SyntaxKind.NUMERIC_LITERAL) {
        SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind();
        if (literalTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
            typeTag = TypeTags.INT;
            value = getIntegerLiteral(literal, textValue, sign);
            originalValue = textValue;
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            // Hex integers inside the byte range are typed as byte, not int.
            if (literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN && withinByteRange(value)) {
                typeTag = TypeTags.BYTE;
            }
        } else if (literalTokenKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN) {
            // A 'd' discriminator (per NumericLiteralSupport) selects decimal over float.
            typeTag = NumericLiteralSupport.isDecimalDiscriminated(textValue) ? TypeTags.DECIMAL : TypeTags.FLOAT;
            if (isFiniteType) {
                // Finite-type members normalize away discriminators and '+' signs.
                value = textValue.replaceAll("[fd+]", "");
                originalValue = textValue.replace("+", "");
            } else {
                value = textValue;
                originalValue = textValue;
            }
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        } else if (literalTokenKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN) {
            typeTag = TypeTags.FLOAT;
            value = getHexNodeValue(textValue);
            originalValue = textValue;
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        }
    } else if (type == SyntaxKind.BOOLEAN_LITERAL) {
        typeTag = TypeTags.BOOLEAN;
        value = Boolean.parseBoolean(textValue);
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.STRING_LITERAL || type == SyntaxKind.XML_TEXT_CONTENT ||
            type == SyntaxKind.TEMPLATE_STRING || type == SyntaxKind.IDENTIFIER_TOKEN) {
        String text = textValue;
        if (type == SyntaxKind.STRING_LITERAL) {
            // Strip the surrounding quotes; an unterminated string only has the opener.
            if (text.length() > 1 && text.charAt(text.length() - 1) == '"') {
                text = text.substring(1, text.length() - 1);
            } else {
                text = text.substring(1);
            }
        }
        String originalText = text;
        // Validate each \u{...} escape and rewrite it to a plain \uXXXX form.
        Matcher matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
        int position = 0;
        while (matcher.find(position)) {
            String hexStringVal = matcher.group(1);
            int hexDecimalVal = Integer.parseInt(hexStringVal, 16);
            if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE)
                    || hexDecimalVal > Constants.MAX_UNICODE) {
                // Out-of-range codepoint: report the error at the escape's exact span.
                String hexStringWithBraces = matcher.group(0);
                int offset = originalText.indexOf(hexStringWithBraces) + 1;
                Location pos = getPosition(literal);
                dlog.error(new BLangDiagnosticLocation(currentCompUnitName,
                                pos.lineRange().startLine().line(),
                                pos.lineRange().endLine().line(),
                                pos.lineRange().startLine().offset() + offset,
                                pos.lineRange().startLine().offset() + offset + hexStringWithBraces.length()),
                        DiagnosticErrorCode.INVALID_UNICODE, hexStringWithBraces);
            }
            text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal));
            // Re-scan just behind the replacement so overlapping escapes are caught.
            position = matcher.end() - 2;
            matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
        }
        // Template strings and XML text keep their escapes verbatim.
        if (type != SyntaxKind.TEMPLATE_STRING && type != SyntaxKind.XML_TEXT_CONTENT) {
            text = StringEscapeUtils.unescapeJava(text);
        }
        typeTag = TypeTags.STRING;
        value = text;
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.NIL_LITERAL) {
        originalValue = "()";
        typeTag = TypeTags.NIL;
        value = null;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.NULL_LITERAL) {
        originalValue = "null";
        typeTag = TypeTags.NIL;
        value = null;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.BINARY_EXPRESSION) {
        // NOTE(review): BINARY_EXPRESSION mapping to BYTE_ARRAY looks surprising here;
        // presumably byte-array literals reach this branch in some parse shape — confirm.
        typeTag = TypeTags.BYTE_ARRAY;
        value = textValue;
        originalValue = textValue;
        if (isNumericLiteral(type)) {
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        } else {
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        }
    } else if (type == SyntaxKind.BYTE_ARRAY_LITERAL) {
        // Byte-array literals have their own transform; delegate entirely.
        return (BLangLiteral) literal.apply(this);
    }
    bLiteral.pos = getPosition(literal);
    bLiteral.type = symTable.getTypeFromTag(typeTag);
    bLiteral.type.tag = typeTag;
    bLiteral.value = value;
    bLiteral.originalValue = originalValue;
    return bLiteral;
}
/**
 * Builds a {@code BLangLiteral} of string type for the given value at the given location.
 */
private BLangLiteral createStringLiteral(String value, Location pos) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.pos = pos;
    literal.type = symTable.stringType;
    literal.value = value;
    literal.originalValue = value;
    return literal;
}
/**
 * Converts a syntax-tree type node into the corresponding {@code BLangType}.
 * Built-in descriptors are delegated to {@link #createBuiltInTypeNode(Node)},
 * name references become user-defined types, and anything else goes through
 * the generic transformer.
 */
private BLangType createTypeNode(Node type) {
    SyntaxKind kind = type.kind();
    if (type instanceof BuiltinSimpleNameReferenceNode || kind == SyntaxKind.NIL_TYPE_DESC) {
        return createBuiltInTypeNode(type);
    }
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.IDENTIFIER_TOKEN) {
        BLangNameReference nameReference = createBLangNameReference(type);
        BLangUserDefinedType userDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        userDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
        userDefinedType.typeName = (BLangIdentifier) nameReference.name;
        userDefinedType.pos = getPosition(type);
        return userDefinedType;
    }
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        SimpleNameReferenceNode simpleNameRef = (SimpleNameReferenceNode) type;
        if (!type.hasDiagnostics()) {
            // A clean simple name reference is simply unwrapped and re-processed as its inner token.
            return createTypeNode(simpleNameRef.name());
        }
        // Erroneous reference: still produce a user-defined type, with an empty package alias.
        BLangIdentifier pkgAlias = this.createIdentifier(null, "");
        BLangIdentifier name = this.createIdentifier(simpleNameRef.name());
        BLangNameReference nameReference = new BLangNameReference(getPosition(type), null, pkgAlias, name);
        BLangUserDefinedType userDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        userDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
        userDefinedType.typeName = (BLangIdentifier) nameReference.name;
        userDefinedType.pos = getPosition(type);
        return userDefinedType;
    }
    return (BLangType) type.apply(this);
}
/**
 * Creates the BLang type node for a built-in type descriptor.
 * Value types produce a {@code BLangValueType}; all other built-ins produce a
 * {@code BLangBuiltInRefTypeNode}. Returns {@code null} for {@code var}
 * (callers such as SimpleVarBuilder.setTypeByNode treat that as declared-with-var).
 */
private BLangType createBuiltInTypeNode(Node type) {
    String typeText;
    if (type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        typeText = "()";
    } else if (type instanceof BuiltinSimpleNameReferenceNode) {
        BuiltinSimpleNameReferenceNode simpleNameRef = (BuiltinSimpleNameReferenceNode) type;
        if (simpleNameRef.kind() == SyntaxKind.VAR_TYPE_DESC) {
            // 'var' carries no explicit type node.
            return null;
        } else if (simpleNameRef.name().isMissing()) {
            // Recover from a missing name token by fabricating a unique identifier.
            String name = missingNodesHelper.getNextMissingNodeName(packageID);
            BLangIdentifier identifier = createIdentifier(getPosition(simpleNameRef.name()), name);
            BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
            return createUserDefinedType(getPosition(type), pkgAlias, identifier);
        }
        typeText = simpleNameRef.name().text();
    } else {
        typeText = ((Token) type).text();
    }
    // Whitespace is stripped so the type text reliably maps to a TypeKind.
    TypeKind typeKind = TreeUtils.stringToTypeKind(typeText.replaceAll("\\s+", ""));
    SyntaxKind kind = type.kind();
    switch (kind) {
        // Simple value types.
        case BOOLEAN_TYPE_DESC:
        case INT_TYPE_DESC:
        case BYTE_TYPE_DESC:
        case FLOAT_TYPE_DESC:
        case DECIMAL_TYPE_DESC:
        case STRING_TYPE_DESC:
        case ANY_TYPE_DESC:
        case NIL_TYPE_DESC:
        case HANDLE_TYPE_DESC:
        case ANYDATA_TYPE_DESC:
        case READONLY_TYPE_DESC:
            BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            valueType.typeKind = typeKind;
            valueType.pos = getPosition(type);
            return valueType;
        default:
            // Remaining built-ins (xml, json, etc.) are reference types.
            BLangBuiltInRefTypeNode builtInValueType =
                    (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
            builtInValueType.typeKind = typeKind;
            builtInValueType.pos = getPosition(type);
            return builtInValueType;
    }
}
/**
 * Creates a simple variable node carrying a name and optional initializer,
 * but no declared type.
 */
private VariableNode createBasicVarNodeWithoutType(Location location, Set<Whitespace> ws,
                                                   String identifier, Location identifierLocation,
                                                   ExpressionNode expr) {
    BLangSimpleVariable variable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    variable.pos = location;
    IdentifierNode varName = this.createIdentifier(identifierLocation, identifier, ws);
    ((BLangIdentifier) varName).pos = identifierLocation;
    variable.setName(varName);
    variable.addWS(ws);
    if (expr != null) {
        variable.setInitialExpression(expr);
    }
    return variable;
}
/**
 * Creates an invocation node (action invocation when {@code isAsync}) from a
 * callee name and its argument list.
 */
private BLangInvocation createBLangInvocation(Node nameNode, NodeList<FunctionArgumentNode> arguments,
                                              Location position, boolean isAsync) {
    BLangInvocation invocation = isAsync
            ? (BLangInvocation) TreeBuilder.createActionInvocation()
            : (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangNameReference nameRef = createBLangNameReference(nameNode);
    invocation.pkgAlias = (BLangIdentifier) nameRef.pkgAlias;
    invocation.name = (BLangIdentifier) nameRef.name;
    List<BLangExpression> argExprs = new ArrayList<>();
    for (FunctionArgumentNode arg : arguments) {
        argExprs.add(createExpression(arg));
    }
    invocation.argExprs = argExprs;
    invocation.pos = position;
    return invocation;
}
/**
 * Builds a {@code BLangNameReference} from a name-like syntax node.
 * Qualified references keep their module prefix as the package alias; all other
 * shapes are unwrapped to a single token and get an empty alias.
 */
private BLangNameReference createBLangNameReference(Node node) {
    switch (node.kind()) {
        case QUALIFIED_NAME_REFERENCE:
            // module:name — alias and identifier come from separate tokens.
            QualifiedNameReferenceNode iNode = (QualifiedNameReferenceNode) node;
            Token modulePrefix = iNode.modulePrefix();
            IdentifierToken identifier = iNode.identifier();
            BLangIdentifier pkgAlias = this.createIdentifier(getPosition(modulePrefix), modulePrefix);
            Location namePos = getPosition(identifier);
            BLangIdentifier name = this.createIdentifier(namePos, identifier);
            return new BLangNameReference(getPosition(node), null, pkgAlias, name);
        case ERROR_TYPE_DESC:
            // Unwrap to the underlying 'error' token.
            node = ((BuiltinSimpleNameReferenceNode) node).name();
            break;
        case NEW_KEYWORD:
        case IDENTIFIER_TOKEN:
        case ERROR_KEYWORD:
            // Already a token; used as-is below.
            break;
        case SIMPLE_NAME_REFERENCE:
        default:
            // Simple references (and any other shape) unwrap to their name token.
            node = ((SimpleNameReferenceNode) node).name();
            break;
    }
    // Unqualified reference: empty package alias anchored at the built-in position.
    Token iToken = (Token) node;
    BLangIdentifier pkgAlias = this.createIdentifier(symTable.builtinPos, "");
    BLangIdentifier name = this.createIdentifier(iToken);
    return new BLangNameReference(getPosition(node), null, pkgAlias, name);
}
private BLangMarkdownDocumentation createMarkdownDocumentationAttachment(Optional<Node> markdownDocumentationNode) {
if (markdownDocumentationNode == null || !markdownDocumentationNode.isPresent()) {
return null;
}
BLangMarkdownDocumentation doc = (BLangMarkdownDocumentation) TreeBuilder.createMarkdownDocumentationNode();
LinkedList<BLangMarkdownDocumentationLine> documentationLines = new LinkedList<>();
LinkedList<BLangMarkdownParameterDocumentation> parameters = new LinkedList<>();
LinkedList<BLangMarkdownReferenceDocumentation> references = new LinkedList<>();
MarkdownDocumentationNode markdownDocNode = (MarkdownDocumentationNode) markdownDocumentationNode.get();
NodeList<Node> docLineList = markdownDocNode.documentationLines();
BLangMarkdownParameterDocumentation bLangParaDoc = null;
BLangMarkdownReturnParameterDocumentation bLangReturnParaDoc = null;
BLangMarkDownDeprecationDocumentation bLangDeprecationDoc = null;
BLangMarkDownDeprecatedParametersDocumentation bLangDeprecatedParaDoc = null;
for (Node singleDocLine : docLineList) {
switch (singleDocLine.kind()) {
case MARKDOWN_DOCUMENTATION_LINE:
case MARKDOWN_REFERENCE_DOCUMENTATION_LINE:
MarkdownDocumentationLineNode docLineNode = (MarkdownDocumentationLineNode) singleDocLine;
NodeList<Node> docElements = docLineNode.documentElements();
String docText = addReferencesAndReturnDocumentationText(references, docElements);
if (bLangDeprecationDoc != null) {
bLangDeprecationDoc.deprecationDocumentationLines.add(docText);
} else if (bLangReturnParaDoc != null) {
bLangReturnParaDoc.returnParameterDocumentationLines.add(docText);
} else if (bLangParaDoc != null) {
bLangParaDoc.parameterDocumentationLines.add(docText);
} else {
BLangMarkdownDocumentationLine bLangDocLine =
(BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();
bLangDocLine.text = docText;
bLangDocLine.pos = getPosition(docLineNode);
documentationLines.add(bLangDocLine);
}
break;
case MARKDOWN_PARAMETER_DOCUMENTATION_LINE:
bLangParaDoc = new BLangMarkdownParameterDocumentation();
MarkdownParameterDocumentationLineNode parameterDocLineNode =
(MarkdownParameterDocumentationLineNode) singleDocLine;
BLangIdentifier paraName = new BLangIdentifier();
Token parameterName = parameterDocLineNode.parameterName();
paraName.value = parameterName.isMissing() ? "" : parameterName.text();
bLangParaDoc.parameterName = paraName;
NodeList<Node> paraDocElements = parameterDocLineNode.documentElements();
String paraDocText = addReferencesAndReturnDocumentationText(references, paraDocElements);
bLangParaDoc.parameterDocumentationLines.add(paraDocText);
bLangParaDoc.pos = getPosition(parameterName);
if (bLangDeprecatedParaDoc != null) {
bLangDeprecatedParaDoc.parameters.add(bLangParaDoc);
} else if (bLangDeprecationDoc != null) {
bLangDeprecatedParaDoc =
new BLangMarkDownDeprecatedParametersDocumentation();
bLangDeprecatedParaDoc.parameters.add(bLangParaDoc);
bLangDeprecationDoc = null;
} else {
parameters.add(bLangParaDoc);
}
break;
case MARKDOWN_RETURN_PARAMETER_DOCUMENTATION_LINE:
bLangReturnParaDoc = new BLangMarkdownReturnParameterDocumentation();
MarkdownParameterDocumentationLineNode returnParaDocLineNode =
(MarkdownParameterDocumentationLineNode) singleDocLine;
NodeList<Node> returnParaDocElements = returnParaDocLineNode.documentElements();
String returnParaDocText =
addReferencesAndReturnDocumentationText(references, returnParaDocElements);
bLangReturnParaDoc.returnParameterDocumentationLines.add(returnParaDocText);
bLangReturnParaDoc.pos = getPosition(returnParaDocLineNode);
doc.returnParameter = bLangReturnParaDoc;
break;
case MARKDOWN_DEPRECATION_DOCUMENTATION_LINE:
bLangDeprecationDoc = new BLangMarkDownDeprecationDocumentation();
MarkdownDocumentationLineNode deprecationDocLineNode =
(MarkdownDocumentationLineNode) singleDocLine;
String lineText = ((Token) deprecationDocLineNode.documentElements().get(0)).text();
bLangDeprecationDoc.addDeprecationLine("
bLangDeprecationDoc.pos = getPosition(deprecationDocLineNode);
break;
case MARKDOWN_CODE_BLOCK:
MarkdownCodeBlockNode codeBlockNode = (MarkdownCodeBlockNode) singleDocLine;
transformCodeBlock(documentationLines, codeBlockNode);
break;
default:
break;
}
}
doc.documentationLines = documentationLines;
doc.parameters = parameters;
doc.references = references;
doc.deprecationDocumentation = bLangDeprecationDoc;
doc.deprecatedParametersDocumentation = bLangDeprecatedParaDoc;
doc.pos = getPosition(markdownDocNode);
return doc;
}
/**
 * Flattens a markdown code block back into a single documentation line whose
 * text reproduces the backticks, optional language attribute and code lines.
 */
private void transformCodeBlock(LinkedList<BLangMarkdownDocumentationLine> documentationLines,
                                MarkdownCodeBlockNode codeBlockNode) {
    StringBuilder text = new StringBuilder();
    if (codeBlockNode.langAttribute().isPresent()) {
        // With a language attribute, keep just the backtick text and append the attribute.
        text.append(codeBlockNode.startBacktick().text());
        text.append(codeBlockNode.langAttribute().get().toString());
    } else {
        text.append(codeBlockNode.startBacktick().toString());
    }
    codeBlockNode.codeLines().forEach(line -> text.append(line.toString()));
    text.append(codeBlockNode.endLineHashToken().toString());
    text.append(codeBlockNode.endBacktick().text());
    BLangMarkdownDocumentationLine docLine =
            (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();
    docLine.text = text.toString();
    docLine.pos = getPosition(codeBlockNode.startLineHashToken());
    documentationLines.add(docLine);
}
/**
 * Concatenates the text of a documentation line's elements, collecting any
 * backtick name references into {@code references} as a side effect.
 * The leading whitespace of the assembled text is trimmed by at most one character.
 */
private String addReferencesAndReturnDocumentationText(LinkedList<BLangMarkdownReferenceDocumentation> references,
                                                       NodeList<Node> docElements) {
    StringBuilder docText = new StringBuilder();
    for (Node element : docElements) {
        if (element.kind() == SyntaxKind.BALLERINA_NAME_REFERENCE) {
            // `name` style reference: record it and reproduce its exact surface text.
            BLangMarkdownReferenceDocumentation bLangRefDoc = new BLangMarkdownReferenceDocumentation();
            BallerinaNameReferenceNode balNameRefNode = (BallerinaNameReferenceNode) element;
            bLangRefDoc.pos = getPosition(balNameRefNode);
            Token startBacktick = balNameRefNode.startBacktick();
            Node backtickContent = balNameRefNode.nameReference();
            Token endBacktick = balNameRefNode.endBacktick();
            String contentString = backtickContent.isMissing() ? "" : backtickContent.toString();
            bLangRefDoc.referenceName = contentString;
            bLangRefDoc.type = DocumentationReferenceType.BACKTICK_CONTENT;
            Optional<Token> referenceType = balNameRefNode.referenceType();
            // A keyword like "function"/"type" before the backticks refines the reference
            // type and is emitted ahead of the backtick content.
            referenceType.ifPresent(
                    refType -> {
                        bLangRefDoc.type = stringToRefType(refType.text());
                        docText.append(refType.toString());
                    }
            );
            transformDocumentationBacktickContent(backtickContent, bLangRefDoc);
            docText.append(startBacktick.isMissing() ? "" : startBacktick.text());
            docText.append(contentString);
            docText.append(endBacktick.isMissing() ? "" : endBacktick.text());
            references.add(bLangRefDoc);
        } else if (element.kind() == SyntaxKind.DOCUMENTATION_DESCRIPTION) {
            // Plain documentation text.
            Token docDescription = (Token) element;
            docText.append(docDescription.text());
        } else if (element.kind() == SyntaxKind.INLINE_CODE_REFERENCE) {
            // Inline code span: reproduce backticks and content verbatim.
            InlineCodeReferenceNode inlineCodeRefNode = (InlineCodeReferenceNode) element;
            docText.append(inlineCodeRefNode.startBacktick().text());
            docText.append(inlineCodeRefNode.codeReference().text());
            docText.append(inlineCodeRefNode.endBacktick().text());
        }
    }
    return trimLeftAtMostOne(docText.toString());
}
/**
 * Strips at most one leading whitespace character from {@code text}.
 */
private String trimLeftAtMostOne(String text) {
    if (text.isEmpty() || !Character.isWhitespace(text.charAt(0))) {
        return text;
    }
    return text.substring(1);
}
/**
 * Extracts qualifier/identifier/type-name parts from the content between
 * documentation backticks into {@code bLangRefDoc}.
 *
 * @throws IllegalArgumentException for content kinds this method does not handle
 */
private void transformDocumentationBacktickContent(Node backtickContent,
                                                   BLangMarkdownReferenceDocumentation bLangRefDoc) {
    QualifiedNameReferenceNode qualifiedRef;
    SimpleNameReferenceNode simpleRef;
    switch (backtickContent.kind()) {
        case CODE_CONTENT:
            // Free-form code content: flag it so later phases can warn.
            bLangRefDoc.hasParserWarnings = true;
            break;
        case QUALIFIED_NAME_REFERENCE:
            // `module:name`
            qualifiedRef = (QualifiedNameReferenceNode) backtickContent;
            bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
            bLangRefDoc.identifier = qualifiedRef.identifier().text();
            break;
        case SIMPLE_NAME_REFERENCE:
            // `name`
            simpleRef = (SimpleNameReferenceNode) backtickContent;
            bLangRefDoc.identifier = simpleRef.name().text();
            break;
        case FUNCTION_CALL:
            // `foo()` or `module:foo()` — only the callee name matters.
            Node funcName = (((FunctionCallExpressionNode) backtickContent).functionName());
            if (funcName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                qualifiedRef = (QualifiedNameReferenceNode) funcName;
                bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                bLangRefDoc.identifier = qualifiedRef.identifier().text();
            } else {
                simpleRef = (SimpleNameReferenceNode) funcName;
                bLangRefDoc.identifier = simpleRef.name().text();
            }
            break;
        case METHOD_CALL:
            // `T.foo()` — method name becomes the identifier, receiver the type name.
            MethodCallExpressionNode methodCallExprNode = (MethodCallExpressionNode) backtickContent;
            bLangRefDoc.identifier =
                    ((SimpleNameReferenceNode) methodCallExprNode.methodName()).name().text();
            Node refName = methodCallExprNode.expression();
            if (refName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                qualifiedRef = (QualifiedNameReferenceNode) refName;
                bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                bLangRefDoc.typeName = qualifiedRef.identifier().text();
            } else {
                simpleRef = (SimpleNameReferenceNode) refName;
                bLangRefDoc.typeName = simpleRef.name().text();
            }
            break;
        default:
            throw new IllegalArgumentException("Invalid backtick content transformation");
    }
}
/**
 * Maps a documentation reference keyword to its {@link DocumentationReferenceType};
 * unknown keywords fall back to {@code BACKTICK_CONTENT}.
 */
private DocumentationReferenceType stringToRefType(String refTypeName) {
    switch (refTypeName) {
        case "annotation":
            return DocumentationReferenceType.ANNOTATION;
        case "const":
            return DocumentationReferenceType.CONST;
        case "function":
            return DocumentationReferenceType.FUNCTION;
        case "module":
            return DocumentationReferenceType.MODULE;
        case "parameter":
            return DocumentationReferenceType.PARAMETER;
        case "service":
            return DocumentationReferenceType.SERVICE;
        case "type":
            return DocumentationReferenceType.TYPE;
        case "var":
            return DocumentationReferenceType.VAR;
        case "variable":
            return DocumentationReferenceType.VARIABLE;
        default:
            return DocumentationReferenceType.BACKTICK_CONTENT;
    }
}
/**
 * Parses a decimal or hexadecimal integer literal token; returns {@code null}
 * for any other token kind.
 */
private Object getIntegerLiteral(Node literal, String nodeValue, SyntaxKind sign) {
    SyntaxKind tokenKind = ((BasicLiteralNode) literal).literalToken().kind();
    switch (tokenKind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
            return parseLong(literal, nodeValue, nodeValue, 10, sign,
                    DiagnosticErrorCode.INTEGER_TOO_SMALL, DiagnosticErrorCode.INTEGER_TOO_LARGE);
        case HEX_INTEGER_LITERAL_TOKEN:
            // Drop the 0x/0X prefix before parsing the hex digits.
            String hexDigits = nodeValue.toLowerCase().replace("0x", "");
            return parseLong(literal, nodeValue, hexDigits, 16, sign,
                    DiagnosticErrorCode.HEXADECIMAL_TOO_SMALL, DiagnosticErrorCode.HEXADECIMAL_TOO_LARGE);
        default:
            return null;
    }
}
/**
 * Parses an integer literal in the given radix, logging an out-of-range
 * diagnostic on failure. For a literal preceded by a minus token the reported
 * range is widened one column to the left to cover the sign, and {@code code1}
 * ("too small") is used; otherwise {@code code2} ("too large") is reported.
 *
 * @return the parsed {@code Long}, or the original literal text when parsing fails
 */
private Object parseLong(Node literal, String originalNodeValue,
                         String processedNodeValue, int radix, SyntaxKind sign,
                         DiagnosticCode code1, DiagnosticCode code2) {
    try {
        return Long.parseLong(processedNodeValue, radix);
    } catch (NumberFormatException e) {
        // Long.parseLong only throws NumberFormatException; catching the narrow
        // type (instead of the previous bare Exception) avoids masking
        // unrelated programming errors.
        Location pos = getPosition(literal);
        if (sign == SyntaxKind.MINUS_TOKEN) {
            pos = new BLangDiagnosticLocation(pos.lineRange().filePath(),
                    pos.lineRange().startLine().line(),
                    pos.lineRange().endLine().line(),
                    pos.lineRange().startLine().offset() - 1,
                    pos.lineRange().endLine().offset());
            dlog.error(pos, code1, originalNodeValue);
        } else {
            dlog.error(pos, code2, originalNodeValue);
        }
    }
    return originalNodeValue;
}
/**
 * Normalizes a hex floating-point literal by appending an explicit zero
 * exponent ("p0") when none is present.
 */
private String getHexNodeValue(String value) {
    boolean hasExponent = value.indexOf('p') >= 0 || value.indexOf('P') >= 0;
    return hasExponent ? value : value + "p0";
}
/**
 * Left-pads {@code str} with '0' characters up to a width of four
 * (used for \\uXXXX unicode escapes).
 */
private String fillWithZeros(String str) {
    StringBuilder padded = new StringBuilder(str);
    while (padded.length() < 4) {
        padded.insert(0, '0');
    }
    return padded.toString();
}
/**
 * Adds {@code flag} to a variable and, recursively, to every variable nested
 * inside its tuple/record/error binding patterns.
 */
private void markVariableWithFlag(BLangVariable variable, Flag flag) {
    variable.flagSet.add(flag);
    switch (variable.getKind()) {
        case TUPLE_VARIABLE:
            BLangTupleVariable tupleVar = (BLangTupleVariable) variable;
            for (BLangVariable member : tupleVar.memberVariables) {
                markVariableWithFlag(member, flag);
            }
            if (tupleVar.restVariable != null) {
                markVariableWithFlag(tupleVar.restVariable, flag);
            }
            break;
        case RECORD_VARIABLE:
            BLangRecordVariable recordVar = (BLangRecordVariable) variable;
            for (BLangRecordVariableKeyValue keyValue : recordVar.variableList) {
                markVariableWithFlag(keyValue.getValue(), flag);
            }
            if (recordVar.restParam != null) {
                markVariableWithFlag((BLangVariable) recordVar.restParam, flag);
            }
            break;
        case ERROR_VARIABLE:
            BLangErrorVariable errorVar = (BLangErrorVariable) variable;
            if (errorVar.message != null) {
                markVariableWithFlag(errorVar.message, flag);
            }
            if (errorVar.cause != null) {
                markVariableWithFlag(errorVar.cause, flag);
            }
            errorVar.detail.forEach(detailEntry -> markVariableWithFlag(detailEntry.valueBindingPattern, flag));
            if (errorVar.restDetail != null) {
                markVariableWithFlag(errorVar.restDetail, flag);
            }
            break;
    }
}
/**
 * True when the kind denotes a simple literal (string, numeric, boolean, nil, null).
 */
private boolean isSimpleLiteral(SyntaxKind syntaxKind) {
    return syntaxKind == SyntaxKind.STRING_LITERAL
            || syntaxKind == SyntaxKind.NUMERIC_LITERAL
            || syntaxKind == SyntaxKind.BOOLEAN_LITERAL
            || syntaxKind == SyntaxKind.NIL_LITERAL
            || syntaxKind == SyntaxKind.NULL_LITERAL;
}
/**
 * True when {@code nodeKind} is any type-descriptor kind.
 * Kept as an explicit enumeration so newly added descriptor kinds must be
 * listed here deliberately.
 */
static boolean isType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case RECORD_TYPE_DESC:
        case OBJECT_TYPE_DESC:
        case NIL_TYPE_DESC:
        case OPTIONAL_TYPE_DESC:
        case ARRAY_TYPE_DESC:
        case INT_TYPE_DESC:
        case BYTE_TYPE_DESC:
        case FLOAT_TYPE_DESC:
        case DECIMAL_TYPE_DESC:
        case STRING_TYPE_DESC:
        case BOOLEAN_TYPE_DESC:
        case XML_TYPE_DESC:
        case JSON_TYPE_DESC:
        case HANDLE_TYPE_DESC:
        case ANY_TYPE_DESC:
        case ANYDATA_TYPE_DESC:
        case NEVER_TYPE_DESC:
        case VAR_TYPE_DESC:
        case SERVICE_TYPE_DESC:
        case PARAMETERIZED_TYPE_DESC:
        case UNION_TYPE_DESC:
        case ERROR_TYPE_DESC:
        case STREAM_TYPE_DESC:
        case TABLE_TYPE_DESC:
        case FUNCTION_TYPE_DESC:
        case TUPLE_TYPE_DESC:
        case PARENTHESISED_TYPE_DESC:
        case READONLY_TYPE_DESC:
        case DISTINCT_TYPE_DESC:
        case INTERSECTION_TYPE_DESC:
        case SINGLETON_TYPE_DESC:
        case TYPE_REFERENCE_TYPE_DESC:
            return true;
        default:
            return false;
    }
}
/**
 * True when {@code syntaxKind} denotes a numeric literal.
 */
private boolean isNumericLiteral(SyntaxKind syntaxKind) {
    // A one-case switch is just an equality test.
    return syntaxKind == SyntaxKind.NUMERIC_LITERAL;
}
/**
 * True when the node is an actual node rather than the NONE placeholder.
 */
private boolean isPresent(Node node) {
    return SyntaxKind.NONE != node.kind();
}
/**
 * A type node is anonymous unless it sits directly under a distinct type
 * descriptor or a type definition.
 */
private boolean checkIfAnonymous(Node node) {
    SyntaxKind parentKind = node.parent().kind();
    if (parentKind == SyntaxKind.DISTINCT_TYPE_DESC || parentKind == SyntaxKind.TYPE_DEFINITION) {
        return false;
    }
    return true;
}
/**
 * Walks up the ancestor chain; true when any ancestor is a statement,
 * i.e. the node lives in a local (block) context.
 */
private boolean ifInLocalContext(Node parent) {
    for (Node current = parent; current != null; current = current.parent()) {
        if (current instanceof StatementNode) {
            return true;
        }
    }
    return false;
}
/**
 * Registers an anonymous record type definition at module level and returns a
 * user-defined type reference pointing at the generated name.
 */
private BLangType createAnonymousRecordType(RecordTypeDescriptorNode recordTypeDescriptorNode,
                                            BLangRecordTypeNode recordTypeNode) {
    Location pos = getPosition(recordTypeDescriptorNode);
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(this.packageID);
    BLangTypeDefinition anonTypeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    anonTypeDef.setName(createIdentifier(pos, genName, null));
    anonTypeDef.flagSet.add(Flag.PUBLIC);
    anonTypeDef.flagSet.add(Flag.ANONYMOUS);
    anonTypeDef.typeNode = recordTypeNode;
    anonTypeDef.pos = pos;
    addToTop(anonTypeDef);
    return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), anonTypeDef.name);
}
/**
 * Creates a user-defined type reference from a package alias and a type name.
 */
private BLangUserDefinedType createUserDefinedType(Location pos,
                                                   BLangIdentifier pkgAlias,
                                                   BLangIdentifier name) {
    BLangUserDefinedType typeRef = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    typeRef.pos = pos;
    typeRef.pkgAlias = pkgAlias;
    typeRef.typeName = name;
    return typeRef;
}
/**
 * True when {@code num} is a {@code Long} in the unsigned byte range [0, 255].
 */
private boolean withinByteRange(Object num) {
    if (!(num instanceof Long)) {
        return false;
    }
    long value = (Long) num;
    return value >= 0 && value <= 255;
}
/**
 * Fluent builder for {@link BLangSimpleVariable} nodes: collects name, type,
 * flags, finality, initializer and position, then assembles the node in
 * {@link #build()}.
 */
private class SimpleVarBuilder {
    private BLangIdentifier name;
    private BLangType type;
    private boolean isDeclaredWithVar;
    private Set<Flag> flags = new HashSet<>();
    private boolean isFinal;
    private ExpressionNode expr;
    private Location pos;

    /**
     * Assembles the variable node from the collected state.
     */
    public BLangSimpleVariable build() {
        BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        bLSimpleVar.setName(this.name);
        // Fix: setTypeNode was previously invoked twice in a row; one call suffices.
        bLSimpleVar.setTypeNode(this.type);
        bLSimpleVar.isDeclaredWithVar = this.isDeclaredWithVar;
        bLSimpleVar.flagSet.addAll(this.flags);
        if (this.isFinal) {
            // FINAL is propagated into nested binding patterns, not just set locally.
            markVariableWithFlag(bLSimpleVar, Flag.FINAL);
        }
        bLSimpleVar.setInitialExpression(this.expr);
        bLSimpleVar.pos = pos;
        return bLSimpleVar;
    }

    public SimpleVarBuilder with(String name) {
        this.name = createIdentifier(null, name);
        return this;
    }

    public SimpleVarBuilder with(String name, Location identifierPos) {
        this.name = createIdentifier(identifierPos, name);
        return this;
    }

    public SimpleVarBuilder with(Token token) {
        this.name = createIdentifier(token);
        return this;
    }

    /**
     * Sets the type from a syntax node; a null node or 'var' marks the
     * variable as declared-with-var.
     */
    public SimpleVarBuilder setTypeByNode(Node typeName) {
        this.isDeclaredWithVar = typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC;
        if (typeName == null) {
            return this;
        }
        this.type = createTypeNode(typeName);
        return this;
    }

    public SimpleVarBuilder setExpressionByNode(Node initExprNode) {
        this.expr = initExprNode != null ? createExpression(initExprNode) : null;
        return this;
    }

    public SimpleVarBuilder setExpression(ExpressionNode expression) {
        this.expr = expression;
        return this;
    }

    public SimpleVarBuilder isDeclaredWithVar() {
        this.isDeclaredWithVar = true;
        return this;
    }

    public SimpleVarBuilder isFinal() {
        this.isFinal = true;
        return this;
    }

    public SimpleVarBuilder isListenerVar() {
        // Listener variables are implicitly final.
        this.flags.add(Flag.LISTENER);
        this.flags.add(Flag.FINAL);
        return this;
    }

    public SimpleVarBuilder setVisibility(Token visibilityQualifier) {
        if (visibilityQualifier != null) {
            if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
                this.flags.add(Flag.PRIVATE);
            } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
                this.flags.add(Flag.PUBLIC);
            }
        }
        return this;
    }

    public SimpleVarBuilder setFinal(boolean present) {
        this.isFinal = present;
        return this;
    }

    public SimpleVarBuilder setOptional(boolean present) {
        // NOTE(review): toggles the PUBLIC flag rather than an OPTIONAL flag —
        // confirm this is intentional.
        if (present) {
            this.flags.add(Flag.PUBLIC);
        } else {
            this.flags.remove(Flag.PUBLIC);
        }
        return this;
    }

    public SimpleVarBuilder setRequired(boolean present) {
        if (present) {
            this.flags.add(Flag.REQUIRED);
        } else {
            this.flags.remove(Flag.REQUIRED);
        }
        return this;
    }

    public SimpleVarBuilder isPublic() {
        this.flags.add(Flag.PUBLIC);
        return this;
    }

    public SimpleVarBuilder isWorkerVar() {
        this.flags.add(Flag.WORKER);
        return this;
    }

    public SimpleVarBuilder setPos(Location pos) {
        this.pos = pos;
        return this;
    }
}
/** Adds the FINAL flag to a simple variable (no recursion into binding patterns). */
private void addFinalQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.FINAL);
}
/**
 * Appends a top-level node to the current compilation unit; no-op when no
 * compilation unit is being built.
 */
private void addToTop(TopLevelNode topLevelNode) {
    if (currentCompilationUnit == null) {
        return;
    }
    currentCompilationUnit.addTopLevelNode(topLevelNode);
}
/**
 * Widens {@code location} leftwards so it starts where {@code upTo} starts.
 */
private Location expandLeft(Location location, Location upTo) {
    // Precondition: upTo must not start after location.
    assert location.lineRange().startLine().line() > upTo.lineRange().startLine().line() ||
            (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() &&
                    location.lineRange().startLine().offset() >= upTo.lineRange().startLine().offset());
    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            upTo.lineRange().startLine().line(),
            location.lineRange().endLine().line(),
            upTo.lineRange().startLine().offset(),
            location.lineRange().endLine().offset());
}
/**
 * Trims {@code location} from the left so it starts where {@code upTo} starts.
 */
private Location trimLeft(Location location, Location upTo) {
    // Precondition: upTo must not start before location.
    assert location.lineRange().startLine().line() < upTo.lineRange().startLine().line() ||
            (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() &&
                    location.lineRange().startLine().offset() <= upTo.lineRange().startLine().offset());
    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            upTo.lineRange().startLine().line(),
            location.lineRange().endLine().line(),
            upTo.lineRange().startLine().offset(),
            location.lineRange().endLine().offset());
}
/**
 * Trims {@code location} from the right so it ends where {@code upTo} ends.
 */
private Location trimRight(Location location, Location upTo) {
    // Precondition: upTo must not end after location.
    assert location.lineRange().endLine().line() > upTo.lineRange().endLine().line() ||
            (location.lineRange().endLine().line() == upTo.lineRange().endLine().line() &&
                    location.lineRange().endLine().offset() >= upTo.lineRange().endLine().offset());
    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            location.lineRange().startLine().line(),
            upTo.lineRange().endLine().line(),
            location.lineRange().startLine().offset(),
            upTo.lineRange().endLine().offset());
}
}

class BLangNodeTransformer extends NodeTransformer<BLangNode> {
// Prefix marking quoted identifier literals ('name) in Ballerina source.
private static final String IDENTIFIER_LITERAL_PREFIX = "'";
// Compiler singletons resolved from the CompilerContext in the constructor.
private BLangDiagnosticLog dlog;
private SymbolTable symTable;
private PackageCache packageCache;
// Identity of the package / compilation unit currently being transformed.
private PackageID packageID;
private String currentCompUnitName;
private BLangCompilationUnit currentCompilationUnit;
private BLangAnonymousModelHelper anonymousModelHelper;
private BLangMissingNodesHelper missingNodesHelper;
/* To keep track of additional statements produced from multi-BLangNode resultant transformations */
private Stack<BLangStatement> additionalStatements = new Stack<>();
/* To keep track if we are inside a block statement, for the purposes of type definition creation */
private boolean isInLocalContext = false;
/**
 * Creates a transformer for one compilation unit.
 *
 * @param context   compiler context used to resolve the shared singletons
 * @param packageID ID of the package being transformed
 * @param entryName name of the compilation unit (source file) being transformed
 */
public BLangNodeTransformer(CompilerContext context,
                            PackageID packageID, String entryName) {
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.dlog.setCurrentPackageId(packageID);
    this.symTable = SymbolTable.getInstance(context);
    this.packageID = packageID;
    this.currentCompUnitName = entryName;
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
    this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
}
/**
 * Transforms a syntax node and returns the result together with any extra
 * statements produced as side effects of the transformation (extras first).
 */
public List<org.ballerinalang.model.tree.Node> accept(Node node) {
    BLangNode transformed = node.apply(this);
    List<org.ballerinalang.model.tree.Node> result = new ArrayList<>();
    // Drain side-produced statements so they precede the main node.
    while (!additionalStatements.empty()) {
        result.add(additionalStatements.pop());
    }
    result.add(transformed);
    return result;
}
/**
 * Transforms a bare identifier token into a {@code BLangIdentifier}.
 */
@Override
public BLangNode transform(IdentifierToken identifierToken) {
    Location pos = getPosition(identifierToken);
    return this.createIdentifier(pos, identifierToken);
}
/**
 * Returns the documentation string of the metadata, or {@code null} when no
 * metadata is present.
 */
private Optional<Node> getDocumentationString(Optional<MetadataNode> metadataNode) {
    return metadataNode.isPresent() ? metadataNode.get().documentationString() : null;
}
/**
 * Returns the annotation list of the metadata, or {@code null} when no
 * metadata is present.
 */
private NodeList<AnnotationNode> getAnnotations(Optional<MetadataNode> metadataNode) {
    return metadataNode.isPresent() ? metadataNode.get().annotations() : null;
}
/**
 * Maps a syntax node's line range onto a diagnostic location in the current
 * compilation unit; {@code null} in, {@code null} out.
 */
private Location getPosition(Node node) {
    if (node == null) {
        return null;
    }
    LineRange lineRange = node.lineRange();
    return new BLangDiagnosticLocation(currentCompUnitName,
            lineRange.startLine().line(),
            lineRange.endLine().line(),
            lineRange.startLine().offset(),
            lineRange.endLine().offset());
}
/**
 * Builds a diagnostic location spanning from the start of {@code startNode}
 * to the end of {@code endNode}; {@code null} when either is absent.
 */
private Location getPosition(Node startNode, Node endNode) {
    if (startNode == null || endNode == null) {
        return null;
    }
    LinePosition start = startNode.lineRange().startLine();
    LinePosition end = endNode.lineRange().endLine();
    return new BLangDiagnosticLocation(currentCompUnitName, start.line(), end.line(),
            start.offset(), end.offset());
}
/**
 * Like {@link #getPosition(Node)}, but when the node's first child is a
 * METADATA node the reported range starts at the second child, so doc and
 * annotation lines are excluded.
 */
private Location getPositionWithoutMetadata(Node node) {
    if (node == null) {
        return null;
    }
    NonTerminalNode nonTerminalNode = (NonTerminalNode) node;
    ChildNodeList children = nonTerminalNode.children();
    LineRange nodeLineRange = node.lineRange();
    LinePosition startPos = children.get(0).kind() == SyntaxKind.METADATA
            ? children.get(1).lineRange().startLine()
            : nodeLineRange.startLine();
    LinePosition endPos = nodeLineRange.endLine();
    return new BLangDiagnosticLocation(currentCompUnitName,
            startPos.line(),
            endPos.line(),
            startPos.offset(),
            endPos.offset());
}
/**
 * Transforms a module part into a {@code BLangCompilationUnit}: imports first,
 * then all module-level members.
 */
@Override
public BLangNode transform(ModulePartNode modulePart) {
    BLangCompilationUnit compilationUnit = (BLangCompilationUnit) TreeBuilder.createCompilationUnit();
    this.currentCompilationUnit = compilationUnit;
    compilationUnit.name = currentCompUnitName;
    compilationUnit.setPackageID(packageID);
    Location pos = getPosition(modulePart);
    for (ImportDeclarationNode importDecl : modulePart.imports()) {
        BLangImportPackage bLangImport = (BLangImportPackage) importDecl.apply(this);
        bLangImport.compUnit = this.createIdentifier(pos, compilationUnit.getName());
        compilationUnit.addTopLevelNode(bLangImport);
    }
    for (ModuleMemberDeclarationNode member : modulePart.members()) {
        compilationUnit.addTopLevelNode((TopLevelNode) member.apply(this));
    }
    // The compilation unit itself always reports a zeroed location.
    compilationUnit.pos = new BLangDiagnosticLocation(pos.lineRange().filePath(), 0, 0, 0, 0);
    // Fix: setPackageID was previously called a second time here; the call above
    // already established the package ID.
    this.currentCompilationUnit = null;
    return compilationUnit;
}
/**
 * Transforms a module-level variable declaration into a {@code BLangVariable},
 * applying visibility, qualifiers, annotations and markdown documentation.
 */
@Override
public BLangNode transform(ModuleVariableDeclarationNode modVarDeclrNode) {
    TypedBindingPatternNode typedBinding = modVarDeclrNode.typedBindingPattern();
    BLangVariable variable = getBLangVariableNode(typedBinding.bindingPattern());
    if (modVarDeclrNode.visibilityQualifier().isPresent()) {
        // A present visibility qualifier marks the variable (and nested bindings) public.
        markVariableWithFlag(variable, Flag.PUBLIC);
    }
    initializeBLangVariable(variable, typedBinding.typeDescriptor(), modVarDeclrNode.initializer(),
            modVarDeclrNode.qualifiers());
    NodeList<AnnotationNode> annotations = getAnnotations(modVarDeclrNode.metadata());
    if (annotations != null) {
        variable.annAttachments = applyAll(annotations);
    }
    variable.pos = getPositionWithoutMetadata(modVarDeclrNode);
    variable.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(modVarDeclrNode.metadata()));
    return variable;
}
/**
 * Transforms an import declaration into a {@code BLangImportPackage}.
 * Without an explicit prefix, the last module-name component becomes the alias.
 */
@Override
public BLangNode transform(ImportDeclarationNode importDeclaration) {
    ImportOrgNameNode orgNameNode = importDeclaration.orgName().orElse(null);
    Token orgName = orgNameNode == null ? null : orgNameNode.orgName();
    Optional<ImportPrefixNode> prefixNode = importDeclaration.prefix();
    Token prefix = prefixNode.isPresent() ? prefixNode.get().prefix() : null;
    List<BLangIdentifier> pkgNameComps = new ArrayList<>();
    for (IdentifierToken name : importDeclaration.moduleName()) {
        pkgNameComps.add(this.createIdentifier(getPosition(name), name.text(), null));
    }
    BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
    importDcl.pos = getPosition(importDeclaration);
    importDcl.pkgNameComps = pkgNameComps;
    importDcl.orgName = this.createIdentifier(getPosition(orgNameNode), orgName);
    // No version component is captured from the syntax tree.
    importDcl.version = this.createIdentifier(null, (String) null);
    importDcl.alias = (prefix != null)
            ? this.createIdentifier(getPosition(prefix), prefix)
            : pkgNameComps.get(pkgNameComps.size() - 1);
    return importDcl;
}
@Override
// Transforms a method declaration (no body, e.g. inside an object type) into a BLangFunction.
// Declarations with a relative resource path are built as resource functions.
public BLangNode transform(MethodDeclarationNode methodDeclarationNode) {
    BLangFunction bLFunction;
    if (methodDeclarationNode.relativeResourcePath().isEmpty()) {
        // null body marks this as a declaration-only (interface) function downstream.
        bLFunction = createFunctionNode(methodDeclarationNode.methodName(),
                methodDeclarationNode.qualifierList(), methodDeclarationNode.methodSignature(), null);
    } else {
        bLFunction = createResourceFunctionNode(methodDeclarationNode.methodName(),
                methodDeclarationNode.qualifierList(), methodDeclarationNode.relativeResourcePath(),
                methodDeclarationNode.methodSignature(), null);
    }
    bLFunction.annAttachments = applyAll(getAnnotations(methodDeclarationNode.metadata()));
    bLFunction.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(methodDeclarationNode.metadata()));
    bLFunction.pos = getPositionWithoutMetadata(methodDeclarationNode);
    return bLFunction;
}
@Override
// Transforms a resource path parameter ([int id] / [string... rest]) into a simple variable.
// Rest path params are typed as a one-dimensional array of the declared element type.
public BLangNode transform(ResourcePathParameterNode resourcePathParameterNode) {
    BLangSimpleVariable pathParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    pathParam.name = createIdentifier(resourcePathParameterNode.paramName());
    BLangType typeNode = (BLangType) resourcePathParameterNode.typeDescriptor().apply(this);
    pathParam.pos = getPosition(resourcePathParameterNode);
    pathParam.annAttachments = applyAll(resourcePathParameterNode.annotations());
    if (resourcePathParameterNode.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) {
        // A rest path param collects the remaining segments, hence T -> T[].
        BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
        arrayTypeNode.elemtype = typeNode;
        arrayTypeNode.dimensions = 1;
        typeNode = arrayTypeNode;
    }
    pathParam.typeNode = typeNode;
    return pathParam;
}
// Builds a BLangResourceFunction from an accessor (get/post/...), its relative resource
// path, signature and body. Path parameters are recorded both as function parameters
// (prepended, in path order) and on the function's resourcePath: "*" marks a segment
// parameter, "**" marks a rest parameter; plain segments keep their identifier.
private BLangFunction createResourceFunctionNode(IdentifierToken accessorName,
                                                 NodeList<Token> qualifierList,
                                                 NodeList<Node> relativeResourcePath,
                                                 FunctionSignatureNode methodSignature,
                                                 FunctionBodyNode functionBody) {
    BLangResourceFunction bLFunction = (BLangResourceFunction) TreeBuilder.createResourceFunctionNode();
    // Mangled internal name encodes accessor + path (see calculateResourceFunctionName).
    String resourceFuncName = calculateResourceFunctionName(accessorName, relativeResourcePath);
    BLangIdentifier name = createIdentifier(getPosition(accessorName), resourceFuncName);
    populateFunctionNode(name, qualifierList, methodSignature, functionBody, bLFunction);
    bLFunction.methodName = createIdentifier(accessorName);
    bLFunction.resourcePath = new ArrayList<>();
    List<BLangSimpleVariable> params = new ArrayList<>();
    for (Node pathSegment : relativeResourcePath) {
        switch (pathSegment.kind()) {
            case SLASH_TOKEN:
                // Separators contribute nothing to path or parameters.
                continue;
            case RESOURCE_PATH_SEGMENT_PARAM:
                BLangSimpleVariable param = (BLangSimpleVariable) pathSegment.apply(this);
                params.add(param);
                bLFunction.addPathParam(param);
                bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "*"));
                break;
            case RESOURCE_PATH_REST_PARAM:
                BLangSimpleVariable restParam = (BLangSimpleVariable) pathSegment.apply(this);
                params.add(restParam);
                bLFunction.setRestPathParam(restParam);
                bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "**"));
                break;
            default:
                bLFunction.resourcePath.add(createIdentifier((Token) pathSegment));
                break;
        }
    }
    // Path parameters precede the signature's own parameters.
    bLFunction.getParameters().addAll(0, params);
    return bLFunction;
}
// Computes the mangled internal name for a resource function: "$<accessor>" followed by
// one "$"-prefixed token per path segment — "$*" for a segment param, "$**" for a rest
// param, "$<name>" for a literal segment. Slash separators are skipped.
private String calculateResourceFunctionName(IdentifierToken accessorName, NodeList<Node> relativeResourcePath) {
    StringBuilder mangledName = new StringBuilder("$");
    mangledName.append(createIdentifier(accessorName).getValue());
    for (Node segment : relativeResourcePath) {
        SyntaxKind segmentKind = segment.kind();
        if (segmentKind == SyntaxKind.SLASH_TOKEN) {
            continue;
        }
        if (segmentKind == SyntaxKind.RESOURCE_PATH_SEGMENT_PARAM) {
            mangledName.append("$*");
        } else if (segmentKind == SyntaxKind.RESOURCE_PATH_REST_PARAM) {
            mangledName.append("$**");
        } else {
            mangledName.append("$").append(createIdentifier((Token) segment).getValue());
        }
    }
    return mangledName.toString();
}
@Override
// Transforms a constant declaration into a BLangConstant. For literal initializers an
// associated anonymous finite type definition (single-value value space) is created so
// the constant can also be used as a type.
public BLangNode transform(ConstantDeclarationNode constantDeclarationNode) {
    BLangConstant constantNode = (BLangConstant) TreeBuilder.createConstantNode();
    Location pos = getPositionWithoutMetadata(constantDeclarationNode);
    Location identifierPos = getPosition(constantDeclarationNode.variableName());
    constantNode.name = createIdentifier(identifierPos, constantDeclarationNode.variableName());
    constantNode.expr = createExpression(constantDeclarationNode.initializer());
    constantNode.pos = pos;
    if (constantDeclarationNode.typeDescriptor().isPresent()) {
        constantNode.typeNode = createTypeNode(constantDeclarationNode.typeDescriptor().orElse(null));
    }
    constantNode.annAttachments = applyAll(getAnnotations(constantDeclarationNode.metadata()));
    constantNode.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(constantDeclarationNode.metadata()));
    constantNode.flagSet.add(Flag.CONSTANT);
    if (constantDeclarationNode.visibilityQualifier().isPresent() &&
            constantDeclarationNode.visibilityQualifier().orElse(null).kind() == SyntaxKind.PUBLIC_KEYWORD) {
        constantNode.flagSet.add(Flag.PUBLIC);
    }
    NodeKind nodeKind = constantNode.expr.getKind();
    if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
        // Clone the literal (keeping its value/type) and mark it constant, so the
        // finite type node owns an independent copy of the value.
        BLangLiteral literal = nodeKind == NodeKind.LITERAL ?
                (BLangLiteral) TreeBuilder.createLiteralExpression() :
                (BLangLiteral) TreeBuilder.createNumericLiteralExpression();
        literal.setValue(((BLangLiteral) constantNode.expr).value);
        literal.type = constantNode.expr.type;
        literal.isConstant = true;
        BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        finiteTypeNode.valueSpace.add(literal);
        // Generated type definition is public + anonymous with a synthesized name.
        BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
        String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
        IdentifierNode anonTypeGenName = createIdentifier(identifierPos, genName);
        typeDef.setName(anonTypeGenName);
        typeDef.flagSet.add(Flag.PUBLIC);
        typeDef.flagSet.add(Flag.ANONYMOUS);
        typeDef.typeNode = finiteTypeNode;
        typeDef.pos = pos;
        constantNode.associatedTypeDefinition = typeDef;
    }
    return constantNode;
}
// Transforms a type definition into a BLangTypeDefinition, carrying over the name,
// documentation, type descriptor, visibility and annotations.
// Consistency fix: added the @Override annotation that every sibling transform(...)
// overload in this file carries — this method overrides the visitor's transform.
@Override
public BLangNode transform(TypeDefinitionNode typeDefNode) {
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    BLangIdentifier identifierNode =
            this.createIdentifier(typeDefNode.typeName());
    typeDef.setName(identifierNode);
    typeDef.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(typeDefNode.metadata()));
    typeDef.typeNode = createTypeNode(typeDefNode.typeDescriptor());
    // Only a `public` visibility qualifier is acted upon.
    typeDefNode.visibilityQualifier().ifPresent(visibilityQual -> {
        if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            typeDef.flagSet.add(Flag.PUBLIC);
        }
    });
    typeDef.pos = getPositionWithoutMetadata(typeDefNode);
    typeDef.annAttachments = applyAll(getAnnotations(typeDefNode.metadata()));
    return typeDef;
}
@Override
// Transforms a union type descriptor. Singleton members are pulled out into a single
// finite type node; the remaining members form a union. If everything was a singleton
// the finite type node is returned directly; otherwise the finite type (if any) is
// appended to the union as an anonymous user-defined type.
public BLangNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {
    List<TypeDescriptorNode> nodes = flattenUnionType(unionTypeDescriptorNode);
    List<TypeDescriptorNode> finiteTypeElements = new ArrayList<>();
    List<List<TypeDescriptorNode>> unionTypeElementsCollection = new ArrayList<>();
    // Partition: singletons go to finiteTypeElements; runs of non-singletons are grouped
    // so their relative order can be restored by reverseFlatMap below.
    for (TypeDescriptorNode type : nodes) {
        if (type.kind() == SyntaxKind.SINGLETON_TYPE_DESC) {
            finiteTypeElements.add(type);
            unionTypeElementsCollection.add(new ArrayList<>());
        } else {
            List<TypeDescriptorNode> lastOfOthers;
            if (unionTypeElementsCollection.isEmpty()) {
                lastOfOthers = new ArrayList<>();
                unionTypeElementsCollection.add(lastOfOthers);
            } else {
                lastOfOthers = unionTypeElementsCollection.get(unionTypeElementsCollection.size() - 1);
            }
            lastOfOthers.add(type);
        }
    }
    List<TypeDescriptorNode> unionElements = new ArrayList<>();
    reverseFlatMap(unionTypeElementsCollection, unionElements);
    BLangFiniteTypeNode bLangFiniteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
    for (TypeDescriptorNode finiteTypeEl : finiteTypeElements) {
        SingletonTypeDescriptorNode singletonTypeNode = (SingletonTypeDescriptorNode) finiteTypeEl;
        BLangLiteral literal = createSimpleLiteral(singletonTypeNode.simpleContExprNode(), true);
        bLangFiniteTypeNode.addValue(literal);
    }
    if (unionElements.isEmpty()) {
        // Pure singleton union (e.g. 1|2|3) collapses to one finite type.
        return bLangFiniteTypeNode;
    }
    BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    unionTypeNode.pos = getPosition(unionTypeDescriptorNode);
    for (TypeDescriptorNode unionElement : unionElements) {
        unionTypeNode.memberTypeNodes.add(createTypeNode(unionElement));
    }
    if (!finiteTypeElements.isEmpty()) {
        // Singleton values join the union via a generated anonymous type definition.
        unionTypeNode.memberTypeNodes.add(deSugarTypeAsUserDefType(bLangFiniteTypeNode));
    }
    return unionTypeNode;
}
// Flattens a right-leaning chain of union type descriptors (A | (B | (C | D)))
// into an ordered member list [A, B, C, D].
private List<TypeDescriptorNode> flattenUnionType(UnionTypeDescriptorNode unionTypeDescriptorNode) {
    List<TypeDescriptorNode> members = new ArrayList<>();
    UnionTypeDescriptorNode current = unionTypeDescriptorNode;
    members.add(current.leftTypeDesc());
    // Descend while the right side is itself a union; each step yields one left member.
    while (current.rightTypeDesc().kind() == SyntaxKind.UNION_TYPE_DESC) {
        current = (UnionTypeDescriptorNode) current.rightTypeDesc();
        members.add(current.leftTypeDesc());
    }
    members.add(current.rightTypeDesc());
    return members;
}
// Appends the inner lists to {@code result}, taking the outer lists last-to-first;
// element order within each inner list is preserved.
private <T> void reverseFlatMap(List<List<T>> listOfLists, List<T> result) {
    int idx = listOfLists.size();
    while (idx > 0) {
        idx--;
        result.addAll(listOfLists.get(idx));
    }
}
// Hoists the given type node into a top-level anonymous type definition and returns
// a user-defined type reference pointing at it (empty package alias).
private BLangUserDefinedType deSugarTypeAsUserDefType(BLangType toIndirect) {
    BLangTypeDefinition bLTypeDef = createTypeDefinitionWithTypeNode(toIndirect);
    Location pos = toIndirect.pos;
    addToTop(bLTypeDef);
    return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), bLTypeDef.name);
}
// Wraps the given type node in a fresh public, anonymous type definition whose name
// is synthesized by the anonymous-model helper; position is taken from the type node.
private BLangTypeDefinition createTypeDefinitionWithTypeNode(BLangType toIndirect) {
    BLangTypeDefinition typeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    String anonTypeName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    typeDefinition.setName(createIdentifier(symTable.builtinPos, anonTypeName));
    typeDefinition.flagSet.add(Flag.PUBLIC);
    typeDefinition.flagSet.add(Flag.ANONYMOUS);
    typeDefinition.typeNode = toIndirect;
    typeDefinition.pos = toIndirect.pos;
    return typeDefinition;
}
@Override
// A parenthesised type descriptor reuses the inner type node, only marking it grouped.
public BLangNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) {
    BLangType innerType = createTypeNode(parenthesisedTypeDescriptorNode.typedesc());
    innerType.grouped = true;
    return innerType;
}
@Override
// A type parameter (e.g. the T in map<T>) simply delegates to its contained type node.
public BLangNode transform(TypeParameterNode typeParameterNode) {
    return createTypeNode(typeParameterNode.typeNode());
}
@Override
// Transforms a tuple type descriptor; a REST_TYPE member (e.g. [int, string...])
// becomes the tuple's restParamType instead of an ordinary member.
public BLangNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) {
    BLangTupleTypeNode tupleType = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode();
    SeparatedNodeList<Node> memberTypes = tupleTypeDescriptorNode.memberTypeDesc();
    int memberCount = memberTypes.size();
    for (int idx = 0; idx < memberCount; idx++) {
        Node member = memberTypes.get(idx);
        if (member.kind() != SyntaxKind.REST_TYPE) {
            tupleType.memberTypeNodes.add(createTypeNode(member));
        } else {
            RestDescriptorNode restDescriptor = (RestDescriptorNode) member;
            tupleType.restParamType = createTypeNode(restDescriptor.typeDescriptor());
        }
    }
    tupleType.pos = getPosition(tupleTypeDescriptorNode);
    return tupleType;
}
// Bug fix: the original carried an @Override annotation on this private method.
// Private methods cannot override anything, so that annotation is a compile error
// (JLS 9.6.4.4); it has been removed.
//
// Returns true when the type parameter wraps a record/object/error type descriptor
// that is anonymous (per checkIfAnonymous); all other kinds are never anonymous here.
private boolean isAnonymousTypeNode(TypeParameterNode typeNode) {
    SyntaxKind paramKind = typeNode.typeNode().kind();
    if (paramKind == SyntaxKind.RECORD_TYPE_DESC || paramKind == SyntaxKind.OBJECT_TYPE_DESC
            || paramKind == SyntaxKind.ERROR_TYPE_DESC) {
        return checkIfAnonymous(typeNode);
    }
    return false;
}
@Override
// "distinct T" reuses the inner type node, tagging it with the DISTINCT flag.
public BLangNode transform(DistinctTypeDescriptorNode distinctTypeDesc) {
    BLangType innerType = createTypeNode(distinctTypeDesc.typeDescriptor());
    innerType.flagSet.add(Flag.DISTINCT);
    return innerType;
}
@Override
// Transforms an object type descriptor into a BLangObjectTypeNode: maps qualifiers to
// flags, sorts members into init function / methods / fields / type references, trims
// the position to the member span, and desugars anonymous object types into a
// top-level anonymous type definition.
public BLangNode transform(ObjectTypeDescriptorNode objTypeDescNode) {
    BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
    for (Token qualifier : objTypeDescNode.objectTypeQualifiers()) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            objectTypeNode.flagSet.add(Flag.CLIENT);
            continue;
        }
        if (kind == SyntaxKind.SERVICE_KEYWORD) {
            objectTypeNode.flagSet.add(SERVICE);
            continue;
        }
        if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            objectTypeNode.flagSet.add(ISOLATED);
            continue;
        }
        // Any other qualifier is unexpected at this point in the pipeline.
        throw new RuntimeException("Syntax kind is not supported: " + kind);
    }
    NodeList<Node> members = objTypeDescNode.members();
    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        if (bLangNode.getKind() == NodeKind.FUNCTION) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                // First init becomes the object's init function; duplicates fall
                // through as ordinary functions (diagnosed later).
                if (objectTypeNode.initFunction == null) {
                    bLangFunction.objInitFunction = true;
                    objectTypeNode.initFunction = bLangFunction;
                } else {
                    objectTypeNode.addFunction(bLangFunction);
                }
            } else {
                objectTypeNode.addFunction(bLangFunction);
            }
        } else if (bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            // Resource functions are not allowed in object type definitions, but the
            // node is still attached so later phases see a complete tree.
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            objectTypeNode.addFunction(bLangFunction);
            dlog.error(getPosition(node), DiagnosticErrorCode.OBJECT_TYPE_DEF_DOES_NOT_ALLOW_RESOURCE_FUNC_DECL);
        } else if (bLangNode.getKind() == NodeKind.VARIABLE) {
            objectTypeNode.addField((BLangSimpleVariable) bLangNode);
        } else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            objectTypeNode.addTypeReference((BLangType) bLangNode);
        }
    }
    objectTypeNode.pos = getPosition(objTypeDescNode);
    // Narrow the reported position to the member span (or the braces when empty).
    if (members.size() > 0) {
        objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(members.get(0)));
        objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(members.get(members.size() - 1)));
    } else {
        objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(objTypeDescNode.closeBrace()));
        objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(objTypeDescNode.openBrace()));
    }
    boolean isAnonymous = checkIfAnonymous(objTypeDescNode);
    objectTypeNode.isAnonymous = isAnonymous;
    if (!isAnonymous) {
        return objectTypeNode;
    }
    return deSugarTypeAsUserDefType(objectTypeNode);
}
// Builds the anonymous class definition backing an object-constructor expression from
// its member nodes. Functions become attached methods; a parameterless first `init`
// becomes the class init function; fields are added directly; type-reference members
// are rejected with a diagnostic.
public BLangClassDefinition transformObjectCtorExpressionBody(NodeList<Node> members) {
    BLangClassDefinition classDefinition = (BLangClassDefinition) TreeBuilder.createClassDefNode();
    classDefinition.flagSet.add(Flag.ANONYMOUS);
    classDefinition.flagSet.add(Flag.OBJECT_CTOR);
    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        NodeKind nodeKind = bLangNode.getKind();
        if (nodeKind == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (!Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                classDefinition.addFunction(bLangFunction);
                continue;
            }
            if (classDefinition.initFunction != null) {
                // A second `init` is kept as an ordinary function (diagnosed later).
                classDefinition.addFunction(bLangFunction);
                continue;
            }
            if (bLangFunction.requiredParams.size() != 0) {
                // Object-ctor init may not declare parameters; report and drop it.
                dlog.error(bLangFunction.pos, DiagnosticErrorCode.OBJECT_CTOR_INIT_CANNOT_HAVE_PARAMETERS);
                continue;
            }
            bLangFunction.objInitFunction = true;
            classDefinition.initFunction = bLangFunction;
        } else if (nodeKind == NodeKind.VARIABLE) {
            classDefinition.addField((BLangSimpleVariable) bLangNode);
        } else if (nodeKind == NodeKind.USER_DEFINED_TYPE) {
            dlog.error(bLangNode.pos, DiagnosticErrorCode.OBJECT_CTOR_DOES_NOT_SUPPORT_TYPE_REFERENCE_MEMBERS);
        }
    }
    // Compiler-generated class; mark internal so tooling can distinguish it.
    classDefinition.internal = true;
    return classDefinition;
}
/**
 * An object-constructor expression creates a class definition for the type defined through
 * the object constructor, and that class definition is added as a top-level node. The object
 * defined by the constructor is then initialized through the generated class, so this
 * transform effectively performs a desugaring.
 * example:
 *  var objVariable = object { int n; };
 *
 *  class anonType0 { int n; }
 *  var objVariable = new anonType0();
 *
 * @param objectConstructorExpressionNode object ctor expression node
 * @return BLangTypeInit node which initializes the generated class definition
 */
@Override
public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) {
    Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode);
    BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members());
    anonClass.pos = pos;
    BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression();
    objectCtorExpression.pos = pos;
    objectCtorExpression.classNode = anonClass;
    // Synthesize a unique name for the generated class and hoist it to the top level.
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
    anonClass.setName(anonTypeGenName);
    anonClass.flagSet.add(Flag.PUBLIC);
    Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference();
    typeReference.ifPresent(typeReferenceNode -> {
        objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode));
    });
    anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations());
    addToTop(anonClass);
    // Qualifiers on the ctor expression map to flags on both the class and the expression.
    NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers();
    for (Token qualifier : objectConstructorQualifierList) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            anonClass.flagSet.add(Flag.CLIENT);
            objectCtorExpression.isClient = true;
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            anonClass.flagSet.add(Flag.ISOLATED);
        } else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
            anonClass.flagSet.add(SERVICE);
            objectCtorExpression.isService = true;
        } else {
            throw new RuntimeException("Syntax kind is not supported: " + kind);
        }
    }
    // Build the `new anonTypeN()` invocation that instantiates the generated class.
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name);
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = pos;
    initNode.userDefinedType = userDefinedType;
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier pkgAlias = createIdentifier(pos, "");
    BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;
    objectCtorExpression.typeInit = initNode;
    return objectCtorExpression;
}
@Override
// Transforms an object field into a simple variable flagged as FIELD, carrying over
// documentation and the final/resource qualifiers.
public BLangNode transform(ObjectFieldNode objFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(objFieldNode.fieldName(), objFieldNode.typeName(),
            objFieldNode.expression().orElse(null), objFieldNode.visibilityQualifier().orElse(null),
            getAnnotations(objFieldNode.metadata()));
    Optional<Node> doc = getDocumentationString(objFieldNode.metadata());
    simpleVar.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
    NodeList<Token> qualifierList = objFieldNode.qualifierList();
    for (Token token : qualifierList) {
        if (token.kind() == SyntaxKind.FINAL_KEYWORD) {
            addFinalQualifier(simpleVar);
        } else if (token.kind() == SyntaxKind.RESOURCE_KEYWORD) {
            addResourceQualifier(simpleVar);
        }
    }
    simpleVar.flagSet.add(Flag.FIELD);
    simpleVar.pos = getPositionWithoutMetadata(objFieldNode);
    return simpleVar;
}
// Marks the variable as a resource field (RESOURCE flag).
private void addResourceQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.RESOURCE);
}
@Override
// An expression function body (=> expr) becomes a BLangExprFunctionBody wrapping
// the transformed expression.
public BLangNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) {
    BLangExprFunctionBody exprBody = (BLangExprFunctionBody) TreeBuilder.createExprFunctionBodyNode();
    exprBody.pos = getPosition(expressionFunctionBodyNode);
    exprBody.expr = createExpression(expressionFunctionBodyNode.expression());
    return exprBody;
}
@Override
// Transforms a record type descriptor into a BLangRecordTypeNode. Plain and
// defaulted fields become record fields (with their doc attachments); any other
// member is a type reference. A record is sealed only when it is closed
// ({| ... |}) and has no rest descriptor. Anonymous non-local records are
// desugared into a generated top-level type definition.
public BLangNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
    BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
    boolean isAnonymous = checkIfAnonymous(recordTypeDescriptorNode);
    for (Node member : recordTypeDescriptorNode.fields()) {
        SyntaxKind memberKind = member.kind();
        if (memberKind == SyntaxKind.RECORD_FIELD) {
            BLangSimpleVariable recordField = (BLangSimpleVariable) member.apply(this);
            Optional<Node> doc = getDocumentationString(((RecordFieldNode) member).metadata());
            recordField.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
            recordTypeNode.fields.add(recordField);
        } else if (memberKind == SyntaxKind.RECORD_FIELD_WITH_DEFAULT_VALUE) {
            BLangSimpleVariable recordField = (BLangSimpleVariable) member.apply(this);
            Optional<Node> doc = getDocumentationString(((RecordFieldWithDefaultValueNode) member).metadata());
            recordField.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
            recordTypeNode.fields.add(recordField);
        } else {
            recordTypeNode.addTypeReference(createTypeNode(member));
        }
    }
    boolean hasRestField = false;
    Optional<RecordRestDescriptorNode> recordRestDesc = recordTypeDescriptorNode.recordRestDescriptor();
    if (recordRestDesc.isPresent()) {
        recordTypeNode.restFieldType = createTypeNode(recordRestDesc.get());
        hasRestField = true;
    }
    boolean isOpen = recordTypeDescriptorNode.bodyStartDelimiter().kind() == SyntaxKind.OPEN_BRACE_TOKEN;
    recordTypeNode.sealed = !(hasRestField || isOpen);
    recordTypeNode.pos = getPosition(recordTypeDescriptorNode);
    recordTypeNode.isAnonymous = isAnonymous;
    recordTypeNode.isLocal = this.isInLocalContext;
    if (!isAnonymous || this.isInLocalContext) {
        return recordTypeNode;
    }
    return createAnonymousRecordType(recordTypeDescriptorNode, recordTypeNode);
}
@Override
// A singleton type descriptor (e.g. the type `5`) becomes a finite type whose value
// space holds exactly that literal.
// Consistency fix: create the node via TreeBuilder like every other finite type node
// in this file, instead of instantiating BLangFiniteTypeNode directly.
// NOTE(review): pos is not set on the returned node here — confirm whether position
// information is expected downstream for singleton types.
public BLangNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
    BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
    BLangLiteral simpleLiteral = createSimpleLiteral(singletonTypeDescriptorNode.simpleContExprNode());
    finiteTypeNode.valueSpace.add(simpleLiteral);
    return finiteTypeNode;
}
@Override
// A built-in simple name reference (int, string, ...) maps directly onto a type node.
// (Parameter renamed from the copy-pasted "singletonTypeDescriptorNode".)
public BLangNode transform(BuiltinSimpleNameReferenceNode builtinNameRefNode) {
    return createTypeNode(builtinNameRefNode);
}
@Override
// A type-reference member (*TypeName;) delegates to its referenced type name.
public BLangNode transform(TypeReferenceNode typeReferenceNode) {
    return createTypeNode(typeReferenceNode.typeName());
}
@Override
// Transforms a plain record field into a simple variable. The field is public; a
// trailing `?` makes it OPTIONAL, otherwise it is REQUIRED.
public BLangNode transform(RecordFieldNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    if (recordFieldNode.questionMarkToken().isPresent()) {
        simpleVar.flagSet.add(Flag.OPTIONAL);
    } else {
        simpleVar.flagSet.add(Flag.REQUIRED);
    }
    simpleVar.flagSet.add(Flag.FIELD);
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}
@Override
// Transforms a record field with a default value into a simple variable; the default
// expression becomes the variable's initial expression.
// Note: unlike plain fields, no OPTIONAL/REQUIRED/FIELD flags are added here.
public BLangNode transform(RecordFieldWithDefaultValueNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    if (isPresent(recordFieldNode.expression())) {
        simpleVar.setInitialExpression(createExpression(recordFieldNode.expression()));
    }
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}
// Records the presence of a `readonly` keyword as a READONLY flag on the variable.
private void addReadOnlyQualifier(Optional<Token> readonlyKeyword, BLangSimpleVariable simpleVar) {
    readonlyKeyword.ifPresent(keyword -> simpleVar.flagSet.add(Flag.READONLY));
}
@Override
// A record rest descriptor (T...;) delegates to its type name.
public BLangNode transform(RecordRestDescriptorNode recordFieldNode) {
    return createTypeNode(recordFieldNode.typeName());
}
@Override
// Transforms a function definition (with body) into a BLangFunction; definitions with
// a relative resource path are built as resource functions.
public BLangNode transform(FunctionDefinitionNode funcDefNode) {
    BLangFunction bLFunction;
    if (funcDefNode.relativeResourcePath().isEmpty()) {
        bLFunction = createFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                funcDefNode.functionSignature(), funcDefNode.functionBody());
    } else {
        bLFunction = createResourceFunctionNode(funcDefNode.functionName(),
                funcDefNode.qualifierList(), funcDefNode.relativeResourcePath(),
                funcDefNode.functionSignature(), funcDefNode.functionBody());
    }
    bLFunction.annAttachments = applyAll(getAnnotations(funcDefNode.metadata()));
    bLFunction.pos = getPositionWithoutMetadata(funcDefNode);
    bLFunction.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(funcDefNode.metadata()));
    return bLFunction;
}
// Creates a non-resource BLangFunction from its name, qualifiers, signature and body
// (body may be null for declarations; see populateFunctionNode).
private BLangFunction createFunctionNode(IdentifierToken funcName, NodeList<Token> qualifierList,
                                         FunctionSignatureNode functionSignature, FunctionBodyNode functionBody) {
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    BLangIdentifier name = createIdentifier(getPosition(funcName), funcName);
    populateFunctionNode(name, qualifierList, functionSignature, functionBody, bLFunction);
    return bLFunction;
}
// Fills in the common parts of a function node: name, qualifier flags, signature, and
// body. A null body marks a declaration-only function (INTERFACE); an external body
// (= external) additionally marks the function NATIVE.
private void populateFunctionNode(BLangIdentifier name, NodeList<Token> qualifierList,
                                  FunctionSignatureNode functionSignature, FunctionBodyNode functionBody,
                                  BLangFunction bLFunction) {
    bLFunction.name = name;
    setFunctionQualifiers(bLFunction, qualifierList);
    populateFuncSignature(bLFunction, functionSignature);
    if (functionBody == null) {
        bLFunction.body = null;
        bLFunction.flagSet.add(Flag.INTERFACE);
        bLFunction.interfaceFunction = true;
    } else {
        bLFunction.body = (BLangFunctionBody) functionBody.apply(this);
        if (bLFunction.body.getKind() == NodeKind.EXTERN_FUNCTION_BODY) {
            bLFunction.flagSet.add(Flag.NATIVE);
        }
    }
}
// Maps each recognized qualifier keyword onto the corresponding function flag;
// unrecognized qualifiers are silently ignored.
private void setFunctionQualifiers(BLangFunction bLFunction, NodeList<Token> qualifierList) {
    for (Token qualifier : qualifierList) {
        SyntaxKind qualifierKind = qualifier.kind();
        if (qualifierKind == SyntaxKind.PUBLIC_KEYWORD) {
            bLFunction.flagSet.add(Flag.PUBLIC);
        } else if (qualifierKind == SyntaxKind.PRIVATE_KEYWORD) {
            bLFunction.flagSet.add(Flag.PRIVATE);
        } else if (qualifierKind == SyntaxKind.REMOTE_KEYWORD) {
            bLFunction.flagSet.add(Flag.REMOTE);
        } else if (qualifierKind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            bLFunction.flagSet.add(Flag.TRANSACTIONAL);
        } else if (qualifierKind == SyntaxKind.RESOURCE_KEYWORD) {
            bLFunction.flagSet.add(Flag.RESOURCE);
        } else if (qualifierKind == SyntaxKind.ISOLATED_KEYWORD) {
            bLFunction.flagSet.add(Flag.ISOLATED);
        }
    }
}
@Override
// An external function body (= @java:Method external;) becomes a
// BLangExternalFunctionBody carrying only its annotation attachments.
public BLangNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) {
    BLangExternalFunctionBody externFunctionBodyNode =
            (BLangExternalFunctionBody) TreeBuilder.createExternFunctionBodyNode();
    externFunctionBodyNode.annAttachments = applyAll(externalFunctionBodyNode.annotations());
    return externFunctionBodyNode;
}
@Override
// Transforms an explicit anonymous function expression: builds a synthetic-named
// LAMBDA/ANONYMOUS function, hoists it to the top level, and returns a lambda
// expression referencing it.
public BLangNode transform(ExplicitAnonymousFunctionExpressionNode anonFuncExprNode) {
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location pos = getPosition(anonFuncExprNode);
    bLFunction.name = createIdentifier(symTable.builtinPos,
                                       anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    populateFuncSignature(bLFunction, anonFuncExprNode.functionSignature());
    bLFunction.body = (BLangFunctionBody) anonFuncExprNode.functionBody().apply(this);
    bLFunction.pos = pos;
    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);
    setFunctionQualifiers(bLFunction, anonFuncExprNode.qualifierList());
    addToTop(bLFunction);
    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = pos;
    return lambdaExpr;
}
@Override
// Transforms a block function body. Worker init statements and named worker
// declarations precede the ordinary statements. While transforming, the
// isInLocalContext flag is set so nested transforms (e.g. record types) know they
// are inside a function body.
// Robustness fix: the flag is now restored in a finally block, so an exception
// thrown by a nested transform cannot leave the transformer stuck in "local" mode.
public BLangNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
    BLangBlockFunctionBody bLFuncBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    this.isInLocalContext = true;
    try {
        List<BLangStatement> statements = new ArrayList<>();
        if (functionBodyBlockNode.namedWorkerDeclarator().isPresent()) {
            NamedWorkerDeclarator namedWorkerDeclarator = functionBodyBlockNode.namedWorkerDeclarator().get();
            generateAndAddBLangStatements(namedWorkerDeclarator.workerInitStatements(), statements);
            for (NamedWorkerDeclarationNode workerDeclarationNode
                    : namedWorkerDeclarator.namedWorkerDeclarations()) {
                statements.add((BLangStatement) workerDeclarationNode.apply(this));
                // A worker declaration may queue extra statements on additionalStatements;
                // drain them immediately after the declaration itself.
                while (!this.additionalStatements.empty()) {
                    statements.add(additionalStatements.pop());
                }
            }
        }
        generateAndAddBLangStatements(functionBodyBlockNode.statements(), statements);
        bLFuncBody.stmts = statements;
        bLFuncBody.pos = getPosition(functionBodyBlockNode);
    } finally {
        this.isInLocalContext = false;
    }
    return bLFuncBody;
}
@Override
// Transforms a foreach statement: builds the loop variable definition from the typed
// binding pattern (tracking `var` declarations), the body block, the collection
// expression, and an optional on-fail clause.
public BLangNode transform(ForEachStatementNode forEachStatementNode) {
    BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
    foreach.pos = getPosition(forEachStatementNode);
    TypedBindingPatternNode typedBindingPatternNode = forEachStatementNode.typedBindingPattern();
    // The loop variable has neither an initializer nor a final qualifier here.
    VariableDefinitionNode variableDefinitionNode = createBLangVarDef(getPosition(typedBindingPatternNode),
            typedBindingPatternNode, Optional.empty(), Optional.empty());
    foreach.setVariableDefinitionNode(variableDefinitionNode);
    foreach.isDeclaredWithVar = typedBindingPatternNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    BLangBlockStmt foreachBlock = (BLangBlockStmt) forEachStatementNode.blockStatement().apply(this);
    foreachBlock.pos = getPosition(forEachStatementNode.blockStatement());
    foreach.setBody(foreachBlock);
    foreach.setCollection(createExpression(forEachStatementNode.actionOrExpressionNode()));
    forEachStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        foreach.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return foreach;
}
@Override
// A fork statement maps onto a fork-join node; only its position is set here.
public BLangNode transform(ForkStatementNode forkStatementNode) {
    BLangForkJoin forkJoin = (BLangForkJoin) TreeBuilder.createForkJoinNode();
    forkJoin.pos = getPosition(forkStatementNode);
    return forkJoin;
}
@Override
public BLangNode transform(NamedWorkerDeclarationNode namedWorkerDeclNode) {
// Desugars `worker w { ... }` into three pieces:
//   1. an anonymous function wrapping the worker body (hoisted to top level),
//   2. a final var definition `<WORKER_LAMBDA_VAR_PREFIX>w = <lambda>` (returned), and
//   3. an async invocation of that lambda variable, pushed onto `additionalStatements`
//      so the caller can emit it right after the returned definition.
BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
Location workerBodyPos = getPosition(namedWorkerDeclNode.workerBody());
// The function gets a fresh anonymous name; the user-facing worker name is kept below.
bLFunction.name = createIdentifier(symTable.builtinPos,
anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
// Reuse the statements of the transformed worker-body block as the function body.
BLangBlockStmt blockStmt = (BLangBlockStmt) namedWorkerDeclNode.workerBody().apply(this);
BLangBlockFunctionBody bodyNode = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
bodyNode.stmts = blockStmt.stmts;
bodyNode.pos = workerBodyPos;
bLFunction.body = bodyNode;
bLFunction.internal = true;
bLFunction.pos = workerBodyPos;
// Mark the function as an anonymous worker lambda.
bLFunction.addFlag(Flag.LAMBDA);
bLFunction.addFlag(Flag.ANONYMOUS);
bLFunction.addFlag(Flag.WORKER);
if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
bLFunction.addFlag(Flag.TRANSACTIONAL);
}
// Worker name: invent one for missing tokens; quoted identifiers ('x) keep their
// original text and are unescaped for the effective name.
String workerName;
if (namedWorkerDeclNode.workerName().isMissing()) {
workerName = missingNodesHelper.getNextMissingNodeName(packageID);
} else {
workerName = namedWorkerDeclNode.workerName().text();
}
if (workerName.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
bLFunction.defaultWorkerName.originalValue = workerName;
workerName = IdentifierUtils.unescapeUnicodeCodepoints(workerName.substring(1));
}
bLFunction.defaultWorkerName.value = workerName;
bLFunction.defaultWorkerName.pos = getPosition(namedWorkerDeclNode.workerName());
NodeList<AnnotationNode> annotations = namedWorkerDeclNode.annotations();
bLFunction.annAttachments = applyAll(annotations);
// Return type: explicit if declared, otherwise nil.
Optional<Node> retNode = namedWorkerDeclNode.returnTypeDesc();
if (retNode.isPresent()) {
ReturnTypeDescriptorNode returnType = (ReturnTypeDescriptorNode) retNode.get();
bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
} else {
BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
bLValueType.pos = getPosition(namedWorkerDeclNode);
bLValueType.typeKind = TypeKind.NIL;
bLFunction.setReturnTypeNode(bLValueType);
}
// Hoist the function to top level and wrap it in a lambda expression (piece 1).
addToTop(bLFunction);
BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaExpr.function = bLFunction;
lambdaExpr.pos = workerBodyPos;
lambdaExpr.internal = true;
// Piece 2: `final var <prefix><workerName> = <lambda>`.
String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;
Location workerNamePos = getPosition(namedWorkerDeclNode.workerName());
BLangSimpleVariable var = new SimpleVarBuilder()
.with(workerLambdaName, workerNamePos)
.setExpression(lambdaExpr)
.isDeclaredWithVar()
.isFinal()
.build();
if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
var.addFlag(Flag.TRANSACTIONAL);
}
BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
lamdaWrkr.pos = workerBodyPos;
var.pos = workerBodyPos;
lamdaWrkr.setVariable(var);
lamdaWrkr.isWorker = true;
lamdaWrkr.internal = var.internal = true;
// Workers declared directly inside a fork statement are additionally flagged FORKED.
if (namedWorkerDeclNode.parent().kind() == SyntaxKind.FORK_STATEMENT) {
lamdaWrkr.isInFork = true;
lamdaWrkr.var.flagSet.add(Flag.FORKED);
}
// Piece 3: async invocation of the lambda variable.
BLangInvocation bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
BLangIdentifier nameInd = this.createIdentifier(workerNamePos, workerLambdaName);
BLangNameReference reference = new BLangNameReference(workerNamePos, null, TreeBuilder.createIdentifierNode(),
nameInd);
bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
bLInvocation.name = (BLangIdentifier) reference.name;
bLInvocation.pos = workerNamePos;
bLInvocation.flagSet = new HashSet<>();
bLInvocation.annAttachments = bLFunction.annAttachments;
// createActionInvocation is expected to yield an INVOCATION node; anything else is
// reported as an error rather than silently mis-handled.
if (bLInvocation.getKind() == NodeKind.INVOCATION) {
bLInvocation.async = true;
} else {
dlog.error(workerBodyPos, DiagnosticErrorCode.START_REQUIRE_INVOCATION);
}
// `final var <workerName> = <async invocation>` — the variable holding the started worker.
BLangSimpleVariable invoc = new SimpleVarBuilder()
.with(workerName, workerNamePos)
.isDeclaredWithVar()
.isWorkerVar()
.setExpression(bLInvocation)
.isFinal()
.setPos(workerNamePos)
.build();
BLangSimpleVariableDef workerInvoc = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
workerInvoc.pos = workerNamePos;
workerInvoc.setVariable(invoc);
workerInvoc.isWorker = true;
invoc.flagSet.add(Flag.WORKER);
// The caller drains additionalStatements immediately after transforming this node,
// so the invocation definition ends up right after the returned lambda definition.
this.additionalStatements.push(workerInvoc);
return lamdaWrkr;
}
/**
 * Transforms every node in {@code annotations} with this transformer and collects the results.
 * Returns an empty list when the node list is {@code null}.
 *
 * @param annotations syntax nodes to transform (may be {@code null})
 * @return the transformed BLang nodes, in input order
 */
@SuppressWarnings("unchecked") // callers choose A to match what each node's transform produces
private <A extends BLangNode, B extends Node> List<A> applyAll(NodeList<B> annotations) {
    // Program to the List interface; ArrayList remains the concrete type.
    List<A> annAttachments = new ArrayList<>();
    if (annotations == null) {
        return annAttachments;
    }
    for (B annotation : annotations) {
        annAttachments.add((A) annotation.apply(this));
    }
    return annAttachments;
}
@Override
public BLangNode transform(AnnotationNode annotation) {
    // Turn an annotation attachment (@ref { ... }) into its BLang counterpart.
    BLangAnnotationAttachment attachment =
            (BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();
    // Optional mapping-constructor value of the annotation.
    annotation.annotValue().ifPresent(mapping ->
            attachment.setExpression((BLangExpression) mapping.apply(this)));
    // Split the reference into package alias + annotation name.
    BLangNameReference nameReference = createBLangNameReference(annotation.annotReference());
    attachment.setAnnotationName(nameReference.name);
    attachment.setPackageAlias(nameReference.pkgAlias);
    attachment.pos = getPosition(annotation);
    return attachment;
}
@Override
public BLangNode transform(QueryActionNode queryActionNode) {
    // A query action is a pipeline of clauses terminated by a mandatory do-clause.
    BLangQueryAction queryAction = (BLangQueryAction) TreeBuilder.createQueryActionNode();
    BLangDoClause doClause = (BLangDoClause) TreeBuilder.createDoClauseNode();
    doClause.body = (BLangBlockStmt) queryActionNode.blockStatement().apply(this);
    // Stretch the body's position leftwards so it covers the 'do' keyword too.
    doClause.body.pos = expandLeft(doClause.body.pos, getPosition(queryActionNode.doKeyword()));
    doClause.pos = doClause.body.pos;
    // Clause order: from-clause, intermediate clauses, then the do-clause.
    queryAction.queryClauseList.add(queryActionNode.queryPipeline().fromClause().apply(this));
    queryAction.queryClauseList.addAll(applyAll(queryActionNode.queryPipeline().intermediateClauses()));
    queryAction.queryClauseList.add(doClause);
    queryAction.doClause = doClause;
    queryAction.pos = getPosition(queryActionNode);
    return queryAction;
}
@Override
public BLangNode transform(AnnotationDeclarationNode annotationDeclarationNode) {
// Builds a BLangAnnotation from `[public] [const] annotation [T] tag on <attach points>;`.
BLangAnnotation annotationDecl = (BLangAnnotation) TreeBuilder.createAnnotationNode();
Location pos = getPositionWithoutMetadata(annotationDeclarationNode);
annotationDecl.pos = pos;
annotationDecl.name = createIdentifier(annotationDeclarationNode.annotationTag());
if (annotationDeclarationNode.visibilityQualifier().isPresent()) {
annotationDecl.addFlag(Flag.PUBLIC);
}
if (annotationDeclarationNode.constKeyword().isPresent()) {
annotationDecl.addFlag(Flag.CONSTANT);
}
annotationDecl.annAttachments = applyAll(getAnnotations(annotationDeclarationNode.metadata()));
annotationDecl.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(annotationDeclarationNode.metadata()));
// Optional type descriptor constraining the annotation's value.
Optional<Node> typedesc = annotationDeclarationNode.typeDescriptor();
if (typedesc.isPresent()) {
annotationDecl.typeNode = createTypeNode(typedesc.get());
}
// Decode each attach point from its identifier tokens. Multi-token points are
// resolved from the leading keyword token(s).
SeparatedNodeList<Node> paramList = annotationDeclarationNode.attachPoints();
for (Node child : paramList) {
AnnotationAttachPointNode attachPoint = (AnnotationAttachPointNode) child;
boolean source = attachPoint.sourceKeyword().isPresent();
AttachPoint bLAttachPoint;
NodeList<Token> idents = attachPoint.identifiers();
Token firstIndent = idents.get(0);
switch (firstIndent.kind()) {
case OBJECT_KEYWORD:
// `object function` -> OBJECT_METHOD; `object field` -> OBJECT_FIELD.
Token secondIndent = idents.get(1);
switch (secondIndent.kind()) {
case FUNCTION_KEYWORD:
bLAttachPoint =
AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_METHOD.getValue(), source);
break;
case FIELD_KEYWORD:
bLAttachPoint =
AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_FIELD.getValue(), source);
break;
default:
throw new RuntimeException("Syntax kind is not supported: " + secondIndent.kind());
}
break;
case SERVICE_KEYWORD:
// One token (`service`) -> SERVICE; three tokens -> SERVICE_REMOTE; any other
// token count is rejected.
String value;
if (idents.size() == 1) {
value = AttachPoint.Point.SERVICE.getValue();
} else if (idents.size() == 3) {
value = AttachPoint.Point.SERVICE_REMOTE.getValue();
} else {
throw new RuntimeException("Invalid annotation attach point");
}
bLAttachPoint = AttachPoint.getAttachmentPoint(value, source);
break;
case RECORD_KEYWORD:
// A `record ...` point maps to RECORD_FIELD (the second token is not inspected).
bLAttachPoint = AttachPoint.getAttachmentPoint(AttachPoint.Point.RECORD_FIELD.getValue(), source);
break;
default:
// Single-token attach points map by their literal token text.
bLAttachPoint = AttachPoint.getAttachmentPoint(firstIndent.text(), source);
}
annotationDecl.addAttachPoint(bLAttachPoint);
}
return annotationDecl;
}
@Override
public BLangNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) {
    // expr.@annotTag — split the tag reference into a package alias and an annotation name.
    BLangAnnotAccessExpr annotAccessExpr = (BLangAnnotAccessExpr) TreeBuilder.createAnnotAccessExpressionNode();
    Node annotTagReference = annotAccessExpressionNode.annotTagReference();
    if (annotTagReference.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // Unqualified tag: the package alias stays empty.
        annotAccessExpr.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        annotAccessExpr.annotationName = createIdentifier(((SimpleNameReferenceNode) annotTagReference).name());
    } else {
        QualifiedNameReferenceNode qualifiedName = (QualifiedNameReferenceNode) annotTagReference;
        annotAccessExpr.pkgAlias = createIdentifier(qualifiedName.modulePrefix());
        annotAccessExpr.annotationName = createIdentifier(qualifiedName.identifier());
    }
    annotAccessExpr.pos = getPosition(annotAccessExpressionNode);
    annotAccessExpr.expr = createExpression(annotAccessExpressionNode.expression());
    return annotAccessExpr;
}
@Override
public BLangNode transform(ConditionalExpressionNode conditionalExpressionNode) {
// Builds `cond ? thenExpr : elseExpr` as a BLang ternary node.
BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) TreeBuilder.createTernaryExpressionNode();
ternaryExpr.pos = getPosition(conditionalExpressionNode);
ternaryExpr.elseExpr = createExpression(conditionalExpressionNode.endExpression());
ternaryExpr.thenExpr = createExpression(conditionalExpressionNode.middleExpression());
ternaryExpr.expr = createExpression(conditionalExpressionNode.lhsExpression());
// If the condition itself is a ternary, re-associate: walk to the deepest else
// branch of that left-hand ternary chain, take its else expression as this
// node's condition, splice this node in as that else branch, and return the
// left-hand ternary as the new root. Net effect: the nested ternary ends up on
// the else side (right-associative shape) instead of the condition side.
if (ternaryExpr.expr.getKind() == NodeKind.TERNARY_EXPR) {
BLangTernaryExpr root = (BLangTernaryExpr) ternaryExpr.expr;
BLangTernaryExpr parent = root;
while (parent.elseExpr.getKind() == NodeKind.TERNARY_EXPR) {
parent = (BLangTernaryExpr) parent.elseExpr;
}
// The innermost else becomes this node's condition...
ternaryExpr.expr = parent.elseExpr;
// ...and this node takes its place at the end of the chain.
parent.elseExpr = ternaryExpr;
ternaryExpr = root;
}
return ternaryExpr;
}
@Override
public BLangNode transform(CheckExpressionNode checkExpressionNode) {
    // 'check expr' -> checked expression; any other keyword -> check-panicked expression.
    Location pos = getPosition(checkExpressionNode);
    BLangExpression checkedExpr = createExpression(checkExpressionNode.expression());
    return checkExpressionNode.checkKeyword().kind() == SyntaxKind.CHECK_KEYWORD
            ? createCheckExpr(pos, checkedExpr)
            : createCheckPanickedExpr(pos, checkedExpr);
}
@Override
public BLangNode transform(TypeTestExpressionNode typeTestExpressionNode) {
    // expr is T — build the type-test node from the tested expression and type.
    BLangTypeTestExpr typeTest = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
    typeTest.expr = createExpression(typeTestExpressionNode.expression());
    typeTest.typeNode = createTypeNode(typeTestExpressionNode.typeDescriptor());
    typeTest.pos = getPosition(typeTestExpressionNode);
    return typeTest;
}
@Override
public BLangNode transform(MappingConstructorExpressionNode mapConstruct) {
// Builds a record literal from `{ ... }`, handling three field forms:
// spread (`...expr`), computed-name (`[expr]: value`), and specific fields
// (`name: value`, or the shorthand `name` which references a same-named variable).
BLangRecordLiteral bLiteralNode = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
for (MappingFieldNode field : mapConstruct.fields()) {
if (field.kind() == SyntaxKind.SPREAD_FIELD) {
// `...expr` — spread-operator field.
SpreadFieldNode spreadFieldNode = (SpreadFieldNode) field;
BLangRecordSpreadOperatorField bLRecordSpreadOpField =
(BLangRecordSpreadOperatorField) TreeBuilder.createRecordSpreadOperatorField();
bLRecordSpreadOpField.expr = createExpression(spreadFieldNode.valueExpr());
bLRecordSpreadOpField.pos = getPosition(spreadFieldNode);
bLiteralNode.fields.add(bLRecordSpreadOpField);
} else if (field.kind() == SyntaxKind.COMPUTED_NAME_FIELD) {
// `[keyExpr]: value` — the key is an expression evaluated at runtime.
ComputedNameFieldNode computedNameField = (ComputedNameFieldNode) field;
BLangRecordKeyValueField bLRecordKeyValueField =
(BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
bLRecordKeyValueField.valueExpr = createExpression(computedNameField.valueExpr());
bLRecordKeyValueField.key =
new BLangRecordLiteral.BLangRecordKey(createExpression(computedNameField.fieldNameExpr()));
bLRecordKeyValueField.key.computedKey = true;
bLiteralNode.fields.add(bLRecordKeyValueField);
} else {
SpecificFieldNode specificField = (SpecificFieldNode) field;
io.ballerina.compiler.syntax.tree.ExpressionNode valueExpr = specificField.valueExpr().orElse(null);
if (valueExpr == null) {
// Shorthand `name` — a var-name field referring to a variable of the same name.
BLangRecordLiteral.BLangRecordVarNameField fieldVar =
(BLangRecordLiteral.BLangRecordVarNameField) TreeBuilder.createRecordVarRefNameFieldNode();
fieldVar.variableName = createIdentifier((Token) ((SpecificFieldNode) field).fieldName());
fieldVar.pkgAlias = createIdentifier(null, "");
fieldVar.pos = fieldVar.variableName.pos;
fieldVar.readonly = specificField.readonlyKeyword().isPresent();
bLiteralNode.fields.add(fieldVar);
} else {
// `name: value` — ordinary key/value field (key is not computed).
BLangRecordKeyValueField bLRecordKeyValueField =
(BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
bLRecordKeyValueField.pos = getPosition(specificField);
bLRecordKeyValueField.readonly = specificField.readonlyKeyword().isPresent();
bLRecordKeyValueField.valueExpr = createExpression(valueExpr);
bLRecordKeyValueField.key =
new BLangRecordLiteral.BLangRecordKey(createExpression(specificField.fieldName()));
bLRecordKeyValueField.key.computedKey = false;
bLRecordKeyValueField.key.pos = getPosition(specificField.fieldName());
bLiteralNode.fields.add(bLRecordKeyValueField);
}
}
}
bLiteralNode.pos = getPosition(mapConstruct);
return bLiteralNode;
}
@Override
public BLangNode transform(ListConstructorExpressionNode listConstructorExprNode) {
    // [e1, e2, ...] — transform every member expression in source order.
    BLangListConstructorExpr listConstructor =
            (BLangListConstructorExpr) TreeBuilder.createListConstructorExpressionNode();
    List<BLangExpression> memberExprs = new ArrayList<>();
    for (Node memberExpr : listConstructorExprNode.expressions()) {
        memberExprs.add(createExpression(memberExpr));
    }
    listConstructor.exprs = memberExprs;
    listConstructor.pos = getPosition(listConstructorExprNode);
    return listConstructor;
}
/**
 * Transforms a unary expression. A unary over a numeric literal is folded into a
 * single literal (the sign becomes part of the literal); otherwise a unary-expression
 * node is built from the operator token and the operand.
 */
@Override
public BLangNode transform(UnaryExpressionNode unaryExprNode) {
    Location pos = getPosition(unaryExprNode);
    if (unaryExprNode.expression().kind() == SyntaxKind.NUMERIC_LITERAL) {
        // Fix: return directly — the old redundant cast to BLangNumericLiteral and
        // the temporary local served no purpose.
        return createSimpleLiteral(unaryExprNode);
    }
    OperatorKind operator = OperatorKind.valueFrom(unaryExprNode.unaryOperator().text());
    BLangExpression expr = createExpression(unaryExprNode.expression());
    return createBLangUnaryExpr(pos, operator, expr);
}
@Override
public BLangNode transform(TypeofExpressionNode typeofExpressionNode) {
    // `typeof expr` is modelled as a unary expression whose operator comes
    // from the 'typeof' keyword token.
    Location pos = getPosition(typeofExpressionNode);
    OperatorKind typeofOp = OperatorKind.valueFrom(typeofExpressionNode.typeofKeyword().text());
    BLangExpression operand = createExpression(typeofExpressionNode.expression());
    return createBLangUnaryExpr(pos, typeofOp, operand);
}
@Override
public BLangNode transform(BinaryExpressionNode binaryExprNode) {
    // The elvis operator has its own node type; every other operator becomes a
    // plain binary expression.
    if (binaryExprNode.operator().kind() == SyntaxKind.ELVIS_TOKEN) {
        BLangElvisExpr elvis = (BLangElvisExpr) TreeBuilder.createElvisExpressionNode();
        elvis.pos = getPosition(binaryExprNode);
        elvis.lhsExpr = createExpression(binaryExprNode.lhsExpr());
        elvis.rhsExpr = createExpression(binaryExprNode.rhsExpr());
        return elvis;
    }
    BLangBinaryExpr binary = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binary.pos = getPosition(binaryExprNode);
    binary.lhsExpr = createExpression(binaryExprNode.lhsExpr());
    binary.rhsExpr = createExpression(binaryExprNode.rhsExpr());
    binary.opKind = OperatorKind.valueFrom(binaryExprNode.operator().text());
    return binary;
}
@Override
public BLangNode transform(FieldAccessExpressionNode fieldAccessExprNode) {
    // expr.field or expr.ns:field (namespace-qualified field access).
    BLangFieldBasedAccess fieldAccess;
    Node fieldName = fieldAccessExprNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess prefixedAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess)
                        TreeBuilder.createFieldBasedAccessWithPrefixNode();
        prefixedAccess.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        prefixedAccess.field = createIdentifier(qualifiedFieldName.identifier());
        prefixedAccess.fieldKind = FieldKind.WITH_NS;
        fieldAccess = prefixedAccess;
    } else {
        fieldAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        fieldAccess.field = createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        fieldAccess.fieldKind = FieldKind.SINGLE;
    }
    // A braced container is unwrapped so the access applies to the inner expression.
    io.ballerina.compiler.syntax.tree.ExpressionNode containerExpr = fieldAccessExprNode.expression();
    fieldAccess.expr = containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION
            ? createExpression(((BracedExpressionNode) containerExpr).expression())
            : createExpression(containerExpr);
    fieldAccess.pos = getPosition(fieldAccessExprNode);
    fieldAccess.field.pos = getPosition(fieldAccessExprNode.fieldName());
    fieldAccess.optionalFieldAccess = false;
    return fieldAccess;
}
/**
 * Transforms an optional field access: {@code expr?.field} or {@code expr?.ns:field}.
 */
@Override
public BLangNode transform(OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode) {
    // Fix: declared without an initializer — the previous code eagerly called
    // TreeBuilder.createFieldBasedAccessNode() here and then overwrote the
    // reference in BOTH branches below, so that first node was a dead allocation.
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = optionalFieldAccessExpressionNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // ns:field — namespace-prefixed access.
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) TreeBuilder
                        .createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field = createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    bLFieldBasedAccess.pos = getPosition(optionalFieldAccessExpressionNode);
    bLFieldBasedAccess.field.pos = getPosition(optionalFieldAccessExpressionNode.fieldName());
    bLFieldBasedAccess.expr = createExpression(optionalFieldAccessExpressionNode.expression());
    bLFieldBasedAccess.optionalFieldAccess = true;
    return bLFieldBasedAccess;
}
@Override
public BLangNode transform(BracedExpressionNode brcExprOut) {
    // A braced (parenthesized) expression contributes no node of its own here;
    // the inner expression is transformed directly.
    io.ballerina.compiler.syntax.tree.ExpressionNode innerExpr = brcExprOut.expression();
    return createExpression(innerExpr);
}
@Override
public BLangNode transform(FunctionCallExpressionNode functionCallNode) {
    // A call that is the operand of 'start' becomes an async invocation.
    boolean async = isFunctionCallAsync(functionCallNode);
    return createBLangInvocation(functionCallNode.functionName(), functionCallNode.arguments(),
            getPosition(functionCallNode), async);
}
@Override
public BLangNode transform(ErrorConstructorExpressionNode errorConstructorExprNode) {
    // error(...) / error TypeRef(...) — collect positional and named args separately;
    // other argument kinds are not collected (same as before).
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    errorConstructorExpr.pos = getPosition(errorConstructorExprNode);
    errorConstructorExprNode.typeReference().ifPresent(typeRef ->
            errorConstructorExpr.errorTypeRef = (BLangUserDefinedType) createTypeNode(typeRef));
    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    for (Node argNode : errorConstructorExprNode.arguments()) {
        if (argNode.kind() == SyntaxKind.POSITIONAL_ARG) {
            positionalArgs.add((BLangExpression) transform((PositionalArgumentNode) argNode));
        } else if (argNode.kind() == SyntaxKind.NAMED_ARG) {
            namedArgs.add((BLangNamedArgsExpression) transform((NamedArgumentNode) argNode));
        }
    }
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;
    return errorConstructorExpr;
}
/**
 * Transforms a method call expression ({@code expr.method(args)}) into a BLang invocation
 * whose receiver is the transformed target expression.
 */
@Override // fix: every sibling transform(...) carries @Override; this one was missing it
public BLangNode transform(MethodCallExpressionNode methodCallExprNode) {
    BLangInvocation bLInvocation = createBLangInvocation(methodCallExprNode.methodName(),
            methodCallExprNode.arguments(),
            getPosition(methodCallExprNode), false);
    // The receiver expression becomes the invocation target.
    bLInvocation.expr = createExpression(methodCallExprNode.expression());
    return bLInvocation;
}
@Override
public BLangNode transform(ImplicitNewExpressionNode implicitNewExprNode) {
    // `new(...)` with no explicit type descriptor.
    BLangTypeInit typeInit = createTypeInit(implicitNewExprNode);
    BLangInvocation initInvocation = createInvocation(implicitNewExprNode, implicitNewExprNode.newKeyword());
    typeInit.argsExpr.addAll(initInvocation.argExprs);
    typeInit.initInvocation = initInvocation;
    return typeInit;
}
@Override
public BLangNode transform(ExplicitNewExpressionNode explicitNewExprNode) {
    // `new T(...)` — the type descriptor is picked up inside createTypeInit.
    BLangTypeInit typeInit = createTypeInit(explicitNewExprNode);
    BLangInvocation initInvocation = createInvocation(explicitNewExprNode, explicitNewExprNode.newKeyword());
    typeInit.argsExpr.addAll(initInvocation.argExprs);
    typeInit.initInvocation = initInvocation;
    return typeInit;
}
// A function call is asynchronous exactly when it is the operand of a 'start' action.
private boolean isFunctionCallAsync(FunctionCallExpressionNode functionCallExpressionNode) {
    SyntaxKind parentKind = functionCallExpressionNode.parent().kind();
    return parentKind == SyntaxKind.START_ACTION;
}
// Shared by implicit and explicit 'new': only the explicit form contributes a type node.
private BLangTypeInit createTypeInit(NewExpressionNode expression) {
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = getPosition(expression);
    if (expression.kind() == SyntaxKind.EXPLICIT_NEW_EXPRESSION) {
        initNode.userDefinedType = createTypeNode(((ExplicitNewExpressionNode) expression).typeDescriptor());
    }
    return initNode;
}
// Builds the init invocation for a 'new' expression; the 'new' keyword token
// itself serves as the invoked name.
private BLangInvocation createInvocation(NewExpressionNode expression, Token newKeyword) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = getPosition(expression);
    populateArgsInvocation(expression, invocation);
    BLangNameReference newReference = createBLangNameReference(newKeyword);
    invocation.name = (BLangIdentifier) newReference.name;
    invocation.pkgAlias = (BLangIdentifier) newReference.pkgAlias;
    return invocation;
}
// Copies the constructor arguments (if any) of a 'new' expression onto the invocation.
private void populateArgsInvocation(NewExpressionNode expression, BLangInvocation invocationNode) {
    Iterator<FunctionArgumentNode> args = getArgumentNodesIterator(expression);
    if (args == null) {
        // Implicit 'new' without a parenthesized arg list — nothing to copy.
        return;
    }
    while (args.hasNext()) {
        invocationNode.argExprs.add(createExpression(args.next()));
    }
}
// Returns an iterator over the 'new' expression's arguments, or null when an
// implicit 'new' omits the argument list entirely.
private Iterator<FunctionArgumentNode> getArgumentNodesIterator(NewExpressionNode expression) {
    if (expression.kind() == SyntaxKind.IMPLICIT_NEW_EXPRESSION) {
        return ((ImplicitNewExpressionNode) expression).parenthesizedArgList()
                .map(argList -> argList.arguments().iterator())
                .orElse(null);
    }
    // Explicit 'new' always carries a parenthesized argument list.
    ParenthesizedArgList argList =
            (ParenthesizedArgList) ((ExplicitNewExpressionNode) expression).parenthesizedArgList();
    return argList.arguments().iterator();
}
@Override
public BLangNode transform(IndexedExpressionNode indexedExpressionNode) {
// Builds `container[k]` / `container[k1, k2, ...]` (multi-key table access),
// with special handling for braced containers and XML step containers.
BLangIndexBasedAccess indexBasedAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
indexBasedAccess.pos = getPosition(indexedExpressionNode);
SeparatedNodeList<io.ballerina.compiler.syntax.tree.ExpressionNode> keys =
indexedExpressionNode.keyExpression();
if (keys.size() == 1) {
indexBasedAccess.indexExpr = createExpression(indexedExpressionNode.keyExpression().get(0));
} else {
// Several keys -> wrap them in a single table multi-key expression.
BLangTableMultiKeyExpr multiKeyExpr =
(BLangTableMultiKeyExpr) TreeBuilder.createTableMultiKeyExpressionNode();
multiKeyExpr.pos = getPosition(keys.get(0), keys.get(keys.size() - 1));
List<BLangExpression> multiKeyIndexExprs = new ArrayList<>();
for (io.ballerina.compiler.syntax.tree.ExpressionNode keyExpr : keys) {
multiKeyIndexExprs.add(createExpression(keyExpr));
}
multiKeyExpr.multiKeyIndexExprs = multiKeyIndexExprs;
indexBasedAccess.indexExpr = multiKeyExpr;
}
Node containerExpr = indexedExpressionNode.containerExpression();
BLangExpression expression = createExpression(containerExpr);
if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
// `(expr)[k]` — index the inner expression, then re-wrap the whole access
// in a group expression so the grouping is preserved.
indexBasedAccess.expr = ((BLangGroupExpr) expression).expression;
BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
group.expression = indexBasedAccess;
group.pos = getPosition(indexedExpressionNode);
return group;
} else if (containerExpr.kind() == SyntaxKind.XML_STEP_EXPRESSION) {
// XML step container: the index is folded into the navigation access as its
// child index; no separate index-access node is returned.
((BLangXMLNavigationAccess) expression).childIndex = indexBasedAccess.indexExpr;
return expression;
}
indexBasedAccess.expr = expression;
return indexBasedAccess;
}
/**
 * Transforms a type cast expression {@code <T> expr} (optionally annotated,
 * e.g. {@code <@ann T> expr}) into a type-conversion node.
 */
@Override
public BLangTypeConversionExpr transform(TypeCastExpressionNode typeCastExpressionNode) {
    BLangTypeConversionExpr typeConversionNode = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    typeConversionNode.pos = getPosition(typeCastExpressionNode);
    TypeCastParamNode typeCastParamNode = typeCastExpressionNode.typeCastParam();
    // Fix: the previous code null-checked typeCastParamNode before reading the type
    // but then dereferenced it unconditionally for annotations, which would throw
    // an NPE whenever the cast param is absent. Both reads now share one guard.
    if (typeCastParamNode != null) {
        if (typeCastParamNode.type().isPresent()) {
            typeConversionNode.typeNode = createTypeNode(typeCastParamNode.type().get());
        }
        typeConversionNode.annAttachments = applyAll(typeCastParamNode.annotations());
    }
    typeConversionNode.expr = createExpression(typeCastExpressionNode.expression());
    return typeConversionNode;
}
@Override
public BLangNode transform(Token token) {
    // Only template/XML text tokens are expected to reach this transformer;
    // anything else indicates an unsupported syntax kind.
    switch (token.kind()) {
        case XML_TEXT_CONTENT:
        case TEMPLATE_STRING:
        case CLOSE_BRACE_TOKEN:
            return createSimpleLiteral(token);
        default:
            throw new RuntimeException("Syntax kind is not supported: " + token.kind());
    }
}
@Override
public BLangNode transform(InterpolationNode interpolationNode) {
    // `${expr}` inside a template contributes just the embedded expression.
    io.ballerina.compiler.syntax.tree.ExpressionNode embeddedExpr = interpolationNode.expression();
    return createExpression(embeddedExpr);
}
@Override
public BLangNode transform(TemplateExpressionNode expressionNode) {
    // Dispatch on template flavour: xml`...`, string`...`, or a raw template.
    switch (expressionNode.kind()) {
        case XML_TEMPLATE_EXPRESSION:
            return createXmlTemplateLiteral(expressionNode);
        case STRING_TEMPLATE_EXPRESSION:
            return createStringTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
        case RAW_TEMPLATE_EXPRESSION:
            return createRawTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
        default:
            throw new RuntimeException("Syntax kind is not supported: " + expressionNode.kind());
    }
}
/**
 * Transforms a table constructor {@code table [key(...)] [ {...}, ... ]}: each row
 * becomes a record literal, and the optional key specifier is attached when present.
 */
@Override
public BLangNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {
    BLangTableConstructorExpr tableConstructorExpr =
            (BLangTableConstructorExpr) TreeBuilder.createTableConstructorExpressionNode();
    tableConstructorExpr.pos = getPosition(tableConstructorExpressionNode);
    for (Node row : tableConstructorExpressionNode.rows()) {
        tableConstructorExpr.addRecordLiteral((BLangRecordLiteral) row.apply(this));
    }
    // Fix: the previous code checked isPresent() and then re-read the Optional via
    // orElse(null).apply(...); ifPresent reads the value exactly once and removes
    // the misleading null fallback.
    tableConstructorExpressionNode.keySpecifier().ifPresent(keySpecifier ->
            tableConstructorExpr.tableKeySpecifier = (BLangTableKeySpecifier) keySpecifier.apply(this));
    return tableConstructorExpr;
}
@Override
public BLangNode transform(TrapExpressionNode trapExpressionNode) {
    // `trap expr` — wrap the inner expression in a trap node.
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.pos = getPosition(trapExpressionNode);
    trapExpr.expr = createExpression(trapExpressionNode.expression());
    return trapExpr;
}
@Override
public BLangNode transform(ReceiveActionNode receiveActionNode) {
    // `<- worker` — only a single-worker receive is supported here.
    BLangWorkerReceive workerReceive = (BLangWorkerReceive) TreeBuilder.createWorkerReceiveNode();
    Node receiveWorkers = receiveActionNode.receiveWorkers();
    Token workerName;
    if (receiveWorkers.kind() != SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // Multiple receive: report the error and substitute a missing identifier
        // token so transformation can continue.
        dlog.error(getPosition(receiveWorkers), DiagnosticErrorCode.MULTIPLE_RECEIVE_ACTION_NOT_YET_SUPPORTED);
        workerName = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
    } else {
        workerName = ((SimpleNameReferenceNode) receiveWorkers).name();
    }
    workerReceive.setWorkerName(createIdentifier(workerName));
    workerReceive.pos = getPosition(receiveActionNode);
    return workerReceive;
}
@Override
public BLangNode transform(SyncSendActionNode syncSendActionNode) {
    // `expr ->> worker` — synchronous send to a named peer worker.
    BLangWorkerSyncSendExpr syncSend = TreeBuilder.createWorkerSendSyncExprNode();
    syncSend.setWorkerName(createIdentifier(syncSendActionNode.peerWorker().name()));
    syncSend.expr = createExpression(syncSendActionNode.expression());
    syncSend.pos = getPosition(syncSendActionNode);
    return syncSend;
}
/**
 * Transforms an implicit anonymous function (arrow function), e.g. {@code x => x + 1}
 * or {@code (x, y) => x + y}, into a BLangArrowFunction with a fresh anonymous name.
 */
@Override
public BLangNode transform(ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) {
    BLangArrowFunction arrowFunction = (BLangArrowFunction) TreeBuilder.createArrowFunctionNode();
    arrowFunction.pos = getPosition(implicitAnonymousFunctionExpressionNode);
    arrowFunction.functionName = createIdentifier(arrowFunction.pos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));

    // Parameters: either a parenthesized inferred list, or a single bare identifier.
    // (Fix: the two branches previously duplicated the parameter-building code;
    // it is factored into createImplicitAnonFunctionParam.)
    Node param = implicitAnonymousFunctionExpressionNode.params();
    if (param.kind() == SyntaxKind.INFER_PARAM_LIST) {
        ImplicitAnonymousFunctionParameters paramsNode = (ImplicitAnonymousFunctionParameters) param;
        for (SimpleNameReferenceNode child : paramsNode.parameters()) {
            arrowFunction.params.add(createImplicitAnonFunctionParam(child));
        }
    } else {
        arrowFunction.params.add(createImplicitAnonFunctionParam(param));
    }

    // The body is a single expression.
    arrowFunction.body = new BLangExprFunctionBody();
    arrowFunction.body.expr = createExpression(implicitAnonymousFunctionExpressionNode.expression());
    arrowFunction.body.pos = arrowFunction.body.expr.pos;
    return arrowFunction;
}

// Builds one arrow-function parameter: the identifier node transforms to a
// user-defined type whose type name supplies the parameter name.
private BLangSimpleVariable createImplicitAnonFunctionParam(Node paramNode) {
    BLangUserDefinedType userDefinedType = (BLangUserDefinedType) paramNode.apply(this);
    BLangSimpleVariable parameter = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    parameter.name = userDefinedType.typeName;
    parameter.pos = getPosition(paramNode);
    return parameter;
}
@Override
public BLangNode transform(CommitActionNode commitActionNode) {
    // 'commit' carries no operands; only its position is recorded.
    BLangCommitExpr commit = TreeBuilder.createCommitExpressionNode();
    commit.pos = getPosition(commitActionNode);
    return commit;
}
@Override
public BLangNode transform(FlushActionNode flushActionNode) {
    // `flush [worker]` — the peer worker is optional; when absent, no worker
    // identifier is set on the flush expression.
    BLangWorkerFlushExpr flushExpr = TreeBuilder.createWorkerFlushExpressionNode();
    flushActionNode.peerWorker().ifPresent(peerWorker ->
            flushExpr.workerIdentifier = createIdentifier(((SimpleNameReferenceNode) peerWorker).name()));
    flushExpr.pos = getPosition(flushActionNode);
    return flushExpr;
}
@Override
public BLangNode transform(LetExpressionNode letExpressionNode) {
    // `let v1 = e1, ... in expr` — collect the declarations, then the body expression.
    BLangLetExpression letExpr = (BLangLetExpression) TreeBuilder.createLetExpressionNode();
    letExpr.pos = getPosition(letExpressionNode);
    letExpr.expr = createExpression(letExpressionNode.expression());
    List<BLangLetVariable> letVarDecls = new ArrayList<>();
    for (LetVariableDeclarationNode letVarDecl : letExpressionNode.letVarDeclarations()) {
        letVarDecls.add(createLetVariable(letVarDecl));
    }
    letExpr.letVarDeclarations = letVarDecls;
    return letExpr;
}
public BLangLetVariable createLetVariable(LetVariableDeclarationNode letVarDecl) {
    // Each let declaration becomes an implicitly-final variable definition with
    // its annotations attached.
    BLangLetVariable letVariable = TreeBuilder.createLetVariableNode();
    VariableDefinitionNode definition = createBLangVarDef(getPosition(letVarDecl),
            letVarDecl.typedBindingPattern(), Optional.of(letVarDecl.expression()), Optional.empty());
    definition.getVariable().addFlag(Flag.FINAL);
    for (BLangNode annotation : applyAll(letVarDecl.annotations())) {
        definition.getVariable().addAnnotationAttachment((AnnotationAttachmentNode) annotation);
    }
    letVariable.definitionNode = definition;
    return letVariable;
}
@Override
public BLangNode transform(MappingBindingPatternNode mappingBindingPatternNode) {
    // A mapping binding pattern on an assignment LHS becomes a record var-ref.
    // A rest binding pattern, if present, is kept apart from the field entries.
    BLangRecordVarRef recordRef = (BLangRecordVarRef) TreeBuilder.createRecordVariableReferenceNode();
    recordRef.pos = getPosition(mappingBindingPatternNode);
    List<BLangRecordVarRefKeyValue> fieldRefs = new ArrayList<>();
    for (BindingPatternNode pattern : mappingBindingPatternNode.fieldBindingPatterns()) {
        if (pattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            recordRef.restParam = createExpression(pattern);
        } else {
            fieldRefs.add(createRecordVarKeyValue(pattern));
        }
    }
    recordRef.recordRefFields = fieldRefs;
    return recordRef;
}
private BLangRecordVarRefKeyValue createRecordVarKeyValue(BindingPatternNode expr) {
    // Full form `{name: pattern}` carries an explicit binding pattern; the
    // shorthand `{name}` uses the field name itself as the bound variable.
    BLangRecordVarRefKeyValue keyValue = new BLangRecordVarRefKeyValue();
    if (expr instanceof FieldBindingPatternFullNode) {
        FieldBindingPatternFullNode full = (FieldBindingPatternFullNode) expr;
        keyValue.variableName = createIdentifier(full.variableName().name());
        keyValue.variableReference = createExpression(full.bindingPattern());
        return keyValue;
    }
    FieldBindingPatternVarnameNode shorthand = (FieldBindingPatternVarnameNode) expr;
    keyValue.variableName = createIdentifier(shorthand.variableName().name());
    // Synthesize a var-ref to the same name for the shorthand form.
    BLangSimpleVarRef selfRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    selfRef.pos = getPosition(shorthand.variableName());
    selfRef.variableName = createIdentifier(shorthand.variableName().name());
    selfRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    keyValue.variableReference = selfRef;
    return keyValue;
}
@Override
public BLangNode transform(ListBindingPatternNode listBindingPatternNode) {
    // A list binding pattern on an assignment LHS becomes a tuple var-ref;
    // a rest binding pattern, if present, is stored separately from members.
    BLangTupleVarRef tupleRef = (BLangTupleVarRef) TreeBuilder.createTupleVariableReferenceNode();
    List<BLangExpression> members = new ArrayList<>();
    for (BindingPatternNode member : listBindingPatternNode.bindingPatterns()) {
        if (member.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            tupleRef.restParam = createExpression(member);
        } else {
            members.add(createExpression(member));
        }
    }
    tupleRef.expressions = members;
    tupleRef.pos = getPosition(listBindingPatternNode);
    return tupleRef;
}
@Override
public BLangNode transform(RestBindingPatternNode restBindingPatternNode) {
    // A rest binding pattern contributes only a reference to its variable name;
    // no wrapper node is created here.
    return createExpression(restBindingPatternNode.variableName());
}
@Override
public BLangNode transform(CaptureBindingPatternNode captureBindingPatternNode) {
    // A capture binding pattern contributes only a reference to the captured
    // variable name; no wrapper node is created here.
    return createExpression(captureBindingPatternNode.variableName());
}
@Override
public BLangNode transform(WildcardBindingPatternNode wildcardBindingPatternNode) {
    // The wildcard pattern binds to the reserved "ignore" variable.
    // Fix: also set the position on the var-ref node itself — previously only
    // the identifier carried a position, unlike the other var-ref builders in
    // this transformer (e.g. the shorthand record field and wait-field paths).
    BLangSimpleVarRef ignoreVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    BLangIdentifier ignore = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    ignore.value = Names.IGNORE.value;
    ignore.pos = getPosition(wildcardBindingPatternNode);
    ignoreVarRef.variableName = ignore;
    ignoreVarRef.pos = ignore.pos;
    return ignoreVarRef;
}
@Override
public BLangNode transform(ErrorBindingPatternNode errorBindingPatternNode) {
    // Builds an error variable reference from an error binding pattern used on
    // the LHS of a destructuring assignment: message, optional cause, named
    // detail args, and an optional rest variable.
    BLangErrorVarRef errorVarRef = (BLangErrorVarRef) TreeBuilder.createErrorVariableReferenceNode();
    errorVarRef.pos = getPosition(errorBindingPatternNode);
    Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
    if (errorTypeRef.isPresent()) {
        errorVarRef.typeNode = createTypeNode(errorTypeRef.get());
    }
    SeparatedNodeList<BindingPatternNode> argListBindingPatterns = errorBindingPatternNode.argListBindingPatterns();
    int numberOfArgs = argListBindingPatterns.size();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    for (int position = 0; position < numberOfArgs; position++) {
        BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
        switch (bindingPatternNode.kind()) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                // Only the first positional pattern is the error message.
                // NOTE(review): fall-through appears deliberate — a capture or
                // wildcard pattern at a later position is treated as the error
                // cause below. Confirm against the language spec.
                if (position == 0) {
                    errorVarRef.message = (BLangVariableReference) createExpression(bindingPatternNode);
                    break;
                }
                // fall through
            case ERROR_BINDING_PATTERN:
                errorVarRef.cause = (BLangVariableReference) createExpression(bindingPatternNode);
                break;
            case NAMED_ARG_BINDING_PATTERN:
                namedArgs.add((BLangNamedArgsExpression) bindingPatternNode.apply(this));
                break;
            default:
                // Remaining kind: the rest binding pattern.
                errorVarRef.restVar = (BLangVariableReference) createExpression(bindingPatternNode);
        }
    }
    errorVarRef.detail = namedArgs;
    return errorVarRef;
}
@Override
public BLangNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) {
    // A named argument inside an error binding pattern: name plus the bound pattern.
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.pos = getPosition(namedArgBindingPatternNode);
    namedArg.name = createIdentifier(namedArgBindingPatternNode.argName());
    namedArg.expr = createExpression(namedArgBindingPatternNode.bindingPattern());
    return namedArg;
}
@Override
public BLangNode transform(ReturnStatementNode returnStmtNode) {
    // A bare `return;` is normalized to return the nil value.
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = getPosition(returnStmtNode);
    if (!returnStmtNode.expression().isPresent()) {
        BLangLiteral nil = (BLangLiteral) TreeBuilder.createLiteralExpression();
        nil.pos = getPosition(returnStmtNode);
        nil.value = Names.NIL_VALUE;
        nil.type = symTable.nilType;
        returnNode.expr = nil;
        return returnNode;
    }
    returnNode.expr = createExpression(returnStmtNode.expression().get());
    return returnNode;
}
@Override
public BLangNode transform(PanicStatementNode panicStmtNode) {
    // A panic statement wraps its panicked expression.
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = getPosition(panicStmtNode);
    panicNode.expr = createExpression(panicStmtNode.expression());
    return panicNode;
}
@Override
public BLangNode transform(ContinueStatementNode continueStmtNode) {
    // A continue statement carries nothing but its position.
    BLangContinue continueNode = (BLangContinue) TreeBuilder.createContinueNode();
    continueNode.pos = getPosition(continueStmtNode);
    return continueNode;
}
@Override
public BLangNode transform(ListenerDeclarationNode listenerDeclarationNode) {
    // A listener declaration is modeled as a simple variable flagged as a
    // listener, with optional visibility, type and initializer.
    Token visibility = listenerDeclarationNode.visibilityQualifier().orElse(null);
    BLangSimpleVariable listenerVar = new SimpleVarBuilder()
            .with(listenerDeclarationNode.variableName())
            .setTypeByNode(listenerDeclarationNode.typeDescriptor().orElse(null))
            .setExpressionByNode(listenerDeclarationNode.initializer())
            .setVisibility(visibility)
            .isListenerVar()
            .build();
    listenerVar.pos = getPositionWithoutMetadata(listenerDeclarationNode);
    listenerVar.name.pos = getPosition(listenerDeclarationNode.variableName());
    listenerVar.annAttachments = applyAll(getAnnotations(listenerDeclarationNode.metadata()));
    return listenerVar;
}
@Override
public BLangNode transform(BreakStatementNode breakStmtNode) {
    // A break statement carries nothing but its position.
    BLangBreak breakNode = (BLangBreak) TreeBuilder.createBreakNode();
    breakNode.pos = getPosition(breakStmtNode);
    return breakNode;
}
@Override
public BLangNode transform(AssignmentStatementNode assignmentStmtNode) {
    // Binding patterns on the LHS become destructuring statements; every other
    // LHS produces a plain assignment.
    switch (assignmentStmtNode.varRef().kind()) {
        case LIST_BINDING_PATTERN:
            return createTupleDestructureStatement(assignmentStmtNode);
        case MAPPING_BINDING_PATTERN:
            return createRecordDestructureStatement(assignmentStmtNode);
        case ERROR_BINDING_PATTERN:
            return createErrorDestructureStatement(assignmentStmtNode);
        default:
            break;
    }
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    BLangExpression lhs = createExpression(assignmentStmtNode.varRef());
    // An invocation is not a valid assignment target; report it here.
    validateLvexpr(lhs, DiagnosticErrorCode.INVALID_INVOCATION_LVALUE_ASSIGNMENT);
    assignment.setExpression(createExpression(assignmentStmtNode.expression()));
    assignment.pos = getPosition(assignmentStmtNode);
    assignment.varRef = lhs;
    return assignment;
}
public BLangNode createTupleDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    // Tuple destructuring assignment: list binding pattern LHS, expression RHS.
    BLangTupleDestructure destructure =
            (BLangTupleDestructure) TreeBuilder.createTupleDestructureStatementNode();
    destructure.varRef = (BLangTupleVarRef) createExpression(assignmentStmtNode.varRef());
    destructure.setExpression(createExpression(assignmentStmtNode.expression()));
    destructure.pos = getPosition(assignmentStmtNode);
    return destructure;
}
public BLangNode createRecordDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    // Record destructuring assignment: mapping binding pattern LHS, expression RHS.
    BLangRecordDestructure destructure =
            (BLangRecordDestructure) TreeBuilder.createRecordDestructureStatementNode();
    destructure.varRef = (BLangRecordVarRef) createExpression(assignmentStmtNode.varRef());
    destructure.setExpression(createExpression(assignmentStmtNode.expression()));
    destructure.pos = getPosition(assignmentStmtNode);
    return destructure;
}
public BLangNode createErrorDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    // Error destructuring assignment: error binding pattern LHS, expression RHS.
    BLangErrorDestructure destructure =
            (BLangErrorDestructure) TreeBuilder.createErrorDestructureStatementNode();
    destructure.varRef = (BLangErrorVarRef) createExpression(assignmentStmtNode.varRef());
    destructure.setExpression(createExpression(assignmentStmtNode.expression()));
    destructure.pos = getPosition(assignmentStmtNode);
    return destructure;
}
@Override
public BLangNode transform(CompoundAssignmentStatementNode compoundAssignmentStmtNode) {
    // Compound assignment: the operator kind comes from the binary operator token.
    BLangCompoundAssignment compound = (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    compound.setExpression(createExpression(compoundAssignmentStmtNode.rhsExpression()));
    compound.setVariable(
            (VariableReferenceNode) createExpression(compoundAssignmentStmtNode.lhsExpression()));
    compound.pos = getPosition(compoundAssignmentStmtNode);
    compound.opKind = OperatorKind.valueFrom(compoundAssignmentStmtNode.binaryOperator().text());
    return compound;
}
private void validateLvexpr(ExpressionNode lExprNode, DiagnosticCode errorCode) {
    // An invocation can never be an lvalue; recurse through field/index access
    // so nested targets are checked as well. The node kinds are mutually
    // exclusive, so the branches cannot both apply to one node.
    NodeKind kind = lExprNode.getKind();
    if (kind == NodeKind.INVOCATION) {
        dlog.error(((BLangInvocation) lExprNode).pos, errorCode);
    } else if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        validateLvexpr(((BLangAccessExpression) lExprNode).expr, errorCode);
    }
}
@Override
public BLangNode transform(DoStatementNode doStatementNode) {
    // do-block with an optional on-fail clause.
    BLangDo doNode = (BLangDo) TreeBuilder.createDoNode();
    doNode.pos = getPosition(doStatementNode);
    BLangBlockStmt body = (BLangBlockStmt) doStatementNode.blockStatement().apply(this);
    body.pos = getPosition(doStatementNode.blockStatement());
    doNode.setBody(body);
    doStatementNode.onFailClause().ifPresent(onFail ->
            doNode.setOnFailClause((org.ballerinalang.model.clauses.OnFailClauseNode) onFail.apply(this)));
    return doNode;
}
@Override
public BLangNode transform(FailStatementNode failStatementNode) {
    // A fail statement wraps its failing expression.
    BLangFail failNode = (BLangFail) TreeBuilder.createFailNode();
    failNode.pos = getPosition(failStatementNode);
    failNode.expr = createExpression(failStatementNode.expression());
    return failNode;
}
@Override
public BLangNode transform(WhileStatementNode whileStmtNode) {
    // while loop: condition, body, and an optional on-fail clause.
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.setCondition(createExpression(whileStmtNode.condition()));
    whileNode.pos = getPosition(whileStmtNode);
    BLangBlockStmt body = (BLangBlockStmt) whileStmtNode.whileBody().apply(this);
    body.pos = getPosition(whileStmtNode.whileBody());
    whileNode.setBody(body);
    whileStmtNode.onFailClause().ifPresent(onFail ->
            whileNode.setOnFailClause((org.ballerinalang.model.clauses.OnFailClauseNode) onFail.apply(this)));
    return whileNode;
}
@Override
public BLangNode transform(IfElseStatementNode ifElseStmtNode) {
    // if statement: condition, then-body, and the optional else body.
    BLangIf ifNode = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifNode.pos = getPosition(ifElseStmtNode);
    ifNode.setCondition(createExpression(ifElseStmtNode.condition()));
    ifNode.setBody((BLangBlockStmt) ifElseStmtNode.ifBody().apply(this));
    ifElseStmtNode.elseBody().ifPresent(elseBody -> {
        Node elseTarget = ((ElseBlockNode) elseBody).elseBody();
        ifNode.setElseStatement(
                (org.ballerinalang.model.tree.statements.StatementNode) elseTarget.apply(this));
    });
    return ifNode;
}
@Override
public BLangNode transform(BlockStatementNode blockStatement) {
    BLangBlockStmt blockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    // Statements inside a block are generated in "local" context; the flag is
    // restored immediately afterwards.
    this.isInLocalContext = true;
    blockStmt.stmts = generateBLangStatements(blockStatement.statements());
    this.isInLocalContext = false;
    blockStmt.pos = getPosition(blockStatement);
    // For if/else bodies, widen the block position leftwards to the parent construct.
    SyntaxKind parentKind = blockStatement.parent().kind();
    if (parentKind == SyntaxKind.IF_ELSE_STATEMENT || parentKind == SyntaxKind.ELSE_BLOCK) {
        blockStmt.pos = expandLeft(blockStmt.pos, getPosition(blockStatement.parent()));
    }
    return blockStmt;
}
@Override
public BLangNode transform(RollbackStatementNode rollbackStatementNode) {
    // A rollback statement may carry an optional expression.
    BLangRollback rollbackNode = (BLangRollback) TreeBuilder.createRollbackNode();
    rollbackNode.pos = getPosition(rollbackStatementNode);
    rollbackStatementNode.expression().ifPresent(expr -> rollbackNode.expr = createExpression(expr));
    return rollbackNode;
}
@Override
public BLangNode transform(LockStatementNode lockStatementNode) {
    // lock statement: body block and an optional on-fail clause.
    BLangLock lockStmt = (BLangLock) TreeBuilder.createLockNode();
    lockStmt.pos = getPosition(lockStatementNode);
    BLangBlockStmt body = (BLangBlockStmt) lockStatementNode.blockStatement().apply(this);
    body.pos = getPosition(lockStatementNode.blockStatement());
    lockStmt.setBody(body);
    lockStatementNode.onFailClause().ifPresent(onFail ->
            lockStmt.setOnFailClause((org.ballerinalang.model.clauses.OnFailClauseNode) onFail.apply(this)));
    return lockStmt;
}
@Override
public BLangNode transform(TypedescTypeDescriptorNode typedescTypeDescriptorNode) {
    // Builds a `typedesc` (or parameterized `typedesc<T>`) type node.
    // Fix: set the position on the built-in ref type node — it was previously
    // left unset on this path, unlike the analogous xml/table type-descriptor
    // transforms in this class, leaving a null location on the bare `typedesc`.
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TypeKind.TYPEDESC;
    refType.pos = getPosition(typedescTypeDescriptorNode);
    Optional<TypeParameterNode> node = typedescTypeDescriptorNode.typedescTypeParamsNode();
    if (node.isPresent()) {
        // Parameterized form: wrap the ref type in a constrained type node.
        BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
        constrainedType.type = refType;
        constrainedType.constraint = createTypeNode(node.get().typeNode());
        constrainedType.pos = getPosition(typedescTypeDescriptorNode);
        return constrainedType;
    }
    return refType;
}
@Override
public BLangNode transform(VariableDeclarationNode varDeclaration) {
    // Delegates to the shared var-def builder; the initializer and the
    // `final` keyword are both optional on a local variable declaration.
    return (BLangNode) createBLangVarDef(getPosition(varDeclaration), varDeclaration.typedBindingPattern(),
            varDeclaration.initializer(), varDeclaration.finalKeyword());
}
// NOTE(review): unlike the sibling transform methods, this one is missing
// @Override — confirm against the base transformer class and add it if it
// does override a declared method.
public BLangNode transform(XmlTypeDescriptorNode xmlTypeDescriptorNode) {
    // Builds an `xml` (or parameterized `xml<T>`) type node.
    BLangBuiltInRefTypeNode xmlRefType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    xmlRefType.typeKind = TypeKind.XML;
    xmlRefType.pos = getPosition(xmlTypeDescriptorNode);
    Optional<TypeParameterNode> typeParam = xmlTypeDescriptorNode.xmlTypeParamsNode();
    if (!typeParam.isPresent()) {
        return xmlRefType;
    }
    // Parameterized form: wrap the ref type in a constrained type node.
    BLangConstrainedType constrained = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
    constrained.type = xmlRefType;
    constrained.constraint = createTypeNode(typeParam.get().typeNode());
    constrained.pos = xmlRefType.pos;
    return constrained;
}
/**
 * Builds the appropriate variable-definition node for a typed binding pattern,
 * dispatching on the kind of the binding pattern.
 *
 * @param location position assigned to the definition and its variable
 * @param typedBindingPattern the type descriptor together with the binding pattern
 * @param initializer optional initializer expression
 * @param finalKeyword optional {@code final} qualifier token
 * @return the matching variable definition node
 * @throws RuntimeException if the binding pattern kind is not one handled here
 */
private VariableDefinitionNode createBLangVarDef(Location location,
        TypedBindingPatternNode typedBindingPattern,
        Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
        Optional<Token> finalKeyword) {
    BindingPatternNode bindingPattern = typedBindingPattern.bindingPattern();
    BLangVariable variable = getBLangVariableNode(bindingPattern);
    // The final keyword, when present, is modeled as a qualifier token list so
    // the destructuring paths can reuse initializeBLangVariable.
    List<Token> qualifiers = new ArrayList<>();
    if (finalKeyword.isPresent()) {
        qualifiers.add(finalKeyword.get());
    }
    NodeList<Token> qualifierList = NodeFactory.createNodeList(qualifiers);
    switch (bindingPattern.kind()) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // Simple variable: initialized inline rather than via initializeBLangVariable.
            BLangSimpleVariableDef bLVarDef =
                    (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
            bLVarDef.pos = variable.pos = location;
            BLangExpression expr = initializer.isPresent() ? createExpression(initializer.get()) : null;
            variable.setInitialExpression(expr);
            bLVarDef.setVariable(variable);
            if (finalKeyword.isPresent()) {
                variable.flagSet.add(Flag.FINAL);
            }
            TypeDescriptorNode typeDesc = typedBindingPattern.typeDescriptor();
            variable.isDeclaredWithVar = isDeclaredWithVar(typeDesc);
            if (!variable.isDeclaredWithVar) {
                variable.setTypeNode(createTypeNode(typeDesc));
            }
            return bLVarDef;
        case MAPPING_BINDING_PATTERN:
            // Record destructuring declaration.
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createRecordVariableDef(variable);
        case LIST_BINDING_PATTERN:
            // Tuple destructuring declaration.
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createTupleVariableDef(variable);
        case ERROR_BINDING_PATTERN:
            // Error destructuring declaration.
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createErrorVariableDef(variable);
        default:
            throw new RuntimeException(
                    "Syntax kind is not a valid binding pattern " + typedBindingPattern.bindingPattern().kind());
    }
}
private void initializeBLangVariable(BLangVariable var, TypeDescriptorNode type,
        Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
        NodeList<Token> qualifiers) {
    // Applies qualifier flags (final/configurable/isolated), resolves the
    // declared type (or marks the variable as declared with `var`), and
    // attaches the initializer expression when one is present.
    for (Token qualifier : qualifiers) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.FINAL_KEYWORD) {
            markVariableWithFlag(var, Flag.FINAL);
        } else if (kind == SyntaxKind.CONFIGURABLE_KEYWORD) {
            var.flagSet.add(Flag.CONFIGURABLE);
            // The required-expression placeholder (`= ?`) marks a configurable
            // variable as required and carries no real initializer.
            // Fix: guard the Optional before dereferencing instead of calling
            // initializer.get() unconditionally (NoSuchElementException risk).
            if (initializer.isPresent() && initializer.get().kind() == SyntaxKind.REQUIRED_EXPRESSION) {
                var.flagSet.add(Flag.REQUIRED);
                initializer = Optional.empty();
            }
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            var.flagSet.add(Flag.ISOLATED);
        }
    }
    var.isDeclaredWithVar = isDeclaredWithVar(type);
    if (!var.isDeclaredWithVar) {
        var.setTypeNode(createTypeNode(type));
    }
    if (initializer.isPresent()) {
        var.setInitialExpression(createExpression(initializer.get()));
    }
}
private BLangRecordVariableDef createRecordVariableDef(BLangVariable var) {
    // Wraps an already-initialized record variable in a definition node,
    // reusing the variable's own position.
    BLangRecordVariableDef definition =
            (BLangRecordVariableDef) TreeBuilder.createRecordVariableDefinitionNode();
    definition.pos = var.pos;
    definition.setVariable(var);
    return definition;
}
private BLangTupleVariableDef createTupleVariableDef(BLangVariable tupleVar) {
    // Wraps an already-initialized tuple variable in a definition node.
    // Fix: take the position from the variable itself, consistent with
    // createRecordVariableDef — the previous getPosition(null) call cannot
    // yield a real source location.
    BLangTupleVariableDef varDefNode = (BLangTupleVariableDef) TreeBuilder.createTupleVariableDefinitionNode();
    varDefNode.pos = tupleVar.pos;
    varDefNode.setVariable(tupleVar);
    return varDefNode;
}
private BLangErrorVariableDef createErrorVariableDef(BLangVariable errorVar) {
    // Wraps an already-initialized error variable in a definition node.
    // Fix: take the position from the variable itself, consistent with
    // createRecordVariableDef — the previous getPosition(null) call cannot
    // yield a real source location.
    BLangErrorVariableDef varDefNode = (BLangErrorVariableDef) TreeBuilder.createErrorVariableDefinitionNode();
    varDefNode.pos = errorVar.pos;
    varDefNode.setVariable(errorVar);
    return varDefNode;
}
@Override
public BLangNode transform(ExpressionStatementNode expressionStatement) {
    // Async send actions have a dedicated transform; delegate directly to it.
    // Everything else becomes an ordinary expression statement.
    if (expressionStatement.expression().kind() == SyntaxKind.ASYNC_SEND_ACTION) {
        return expressionStatement.expression().apply(this);
    }
    BLangExpressionStmt exprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode();
    exprStmt.expr = createExpression(expressionStatement.expression());
    exprStmt.pos = getPosition(expressionStatement);
    return exprStmt;
}
@Override
public BLangNode transform(AsyncSendActionNode asyncSendActionNode) {
    // Async message send to a peer worker: worker name plus the sent expression.
    BLangWorkerSend workerSend = (BLangWorkerSend) TreeBuilder.createWorkerSendNode();
    workerSend.setWorkerName(createIdentifier(getPosition(asyncSendActionNode.peerWorker()),
            asyncSendActionNode.peerWorker().name()));
    workerSend.expr = createExpression(asyncSendActionNode.expression());
    workerSend.pos = getPosition(asyncSendActionNode);
    return workerSend;
}
@Override
public BLangNode transform(WaitActionNode waitActionNode) {
    // A wait-fields list becomes a wait-for-all expression; any other future
    // expression becomes a single-element wait expression.
    Node futureExpr = waitActionNode.waitFutureExpr();
    if (futureExpr.kind() == SyntaxKind.WAIT_FIELDS_LIST) {
        return getWaitForAllExpr((WaitFieldsListNode) futureExpr);
    }
    BLangWaitExpr waitExpr = TreeBuilder.createWaitExpressionNode();
    waitExpr.pos = getPosition(waitActionNode);
    waitExpr.exprList = Collections.singletonList(createExpression(futureExpr));
    return waitExpr;
}
private BLangWaitForAllExpr getWaitForAllExpr(WaitFieldsListNode waitFields) {
    // Transforms every member of the wait-fields list into a key-value entry.
    BLangWaitForAllExpr waitForAll = TreeBuilder.createWaitForAllExpressionNode();
    List<BLangWaitKeyValue> entries = new ArrayList<>();
    for (Node field : waitFields.waitFields()) {
        entries.add(getWaitForAllExpr(field));
    }
    waitForAll.keyValuePairs = entries;
    waitForAll.pos = getPosition(waitFields);
    return waitForAll;
}
private BLangWaitKeyValue getWaitForAllExpr(Node waitFields) {
    // Transforms a single member of a wait-fields list into a key-value node.
    BLangWaitForAllExpr.BLangWaitKeyValue keyValue = TreeBuilder.createWaitKeyValueNode();
    keyValue.pos = getPosition(waitFields);
    if (waitFields.kind() == SyntaxKind.WAIT_FIELD) {
        // Explicit form `name: futureExpr` — key plus the awaited expression.
        WaitFieldNode waitFieldNode = (WaitFieldNode) waitFields;
        BLangIdentifier key = createIdentifier(waitFieldNode.fieldName().name());
        key.setLiteral(false);
        keyValue.key = key;
        keyValue.valueExpr = createExpression(waitFieldNode.waitFutureExpr());
        return keyValue;
    }
    // Shorthand form: a bare variable name stands for both the field key and
    // the future being awaited.
    SimpleNameReferenceNode varName = (SimpleNameReferenceNode) waitFields;
    BLangIdentifier key = createIdentifier(varName.name());
    key.setLiteral(false);
    keyValue.key = key;
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(varName);
    varRef.variableName = key;
    varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // NOTE(review): the shorthand populates keyExpr (not valueExpr) with the
    // synthesized var-ref, unlike the explicit branch above — presumably
    // resolved downstream; confirm this asymmetry is intentional.
    keyValue.keyExpr = varRef;
    return keyValue;
}
@Override
public BLangNode transform(StartActionNode startActionNode) {
    // Transforms a start action: the started invocation is marked async and
    // receives the start action's annotation attachments.
    BLangNode expression = createActionOrExpression(startActionNode.expression());
    BLangInvocation invocation;
    if (!(expression instanceof BLangWorkerSend)) {
        invocation = (BLangInvocation) expression;
    } else {
        // NOTE(review): when createActionOrExpression yields a worker send,
        // the underlying invocation is unwrapped and used instead — confirm
        // the scenarios in which this path is taken.
        invocation = (BLangInvocation) ((BLangWorkerSend) expression).expr;
        expression = ((BLangWorkerSend) expression).expr;
    }
    if (expression.getKind() == NodeKind.INVOCATION) {
        // Re-package a plain invocation as an action invocation, copying the
        // fields over and using the start action's position.
        BLangActionInvocation actionInvocation = (BLangActionInvocation) TreeBuilder.createActionInvocation();
        actionInvocation.expr = invocation.expr;
        actionInvocation.pkgAlias = invocation.pkgAlias;
        actionInvocation.name = invocation.name;
        actionInvocation.argExprs = invocation.argExprs;
        actionInvocation.flagSet = invocation.flagSet;
        actionInvocation.pos = getPosition(startActionNode);
        invocation = actionInvocation;
    }
    invocation.async = true;
    invocation.annAttachments = applyAll(startActionNode.annotations());
    return invocation;
}
@Override
public BLangNode transform(TransactionStatementNode transactionStatementNode) {
    // transaction statement: body block and an optional on-fail clause.
    BLangTransaction transactionNode = (BLangTransaction) TreeBuilder.createTransactionNode();
    BLangBlockStmt body = (BLangBlockStmt) transactionStatementNode.blockStatement().apply(this);
    body.pos = getPosition(transactionStatementNode.blockStatement());
    transactionNode.setTransactionBody(body);
    transactionNode.pos = getPosition(transactionStatementNode);
    transactionStatementNode.onFailClause().ifPresent(onFail ->
            transactionNode.setOnFailClause(
                    (org.ballerinalang.model.clauses.OnFailClauseNode) onFail.apply(this)));
    return transactionNode;
}
@Override
public BLangNode transform(PositionalArgumentNode argumentNode) {
    // A positional argument contributes just its expression; no wrapper node.
    return createExpression(argumentNode.expression());
}
@Override
public BLangNode transform(NamedArgumentNode namedArgumentNode) {
    // A named call argument: argument name plus its value expression.
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.pos = getPosition(namedArgumentNode);
    namedArg.name = createIdentifier(namedArgumentNode.argumentName().name());
    namedArg.expr = createExpression(namedArgumentNode.expression());
    return namedArg;
}
@Override
public BLangNode transform(RestArgumentNode restArgumentNode) {
    // A rest argument; the node is positioned at the ellipsis token.
    BLangRestArgsExpression restArgs = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    restArgs.pos = getPosition(restArgumentNode.ellipsis());
    restArgs.expr = createExpression(restArgumentNode.expression());
    return restArgs;
}
@Override
public BLangNode transform(RequiredParameterNode requiredParameter) {
    // A required parameter; its position is trimmed so it starts at the type name.
    BLangSimpleVariable param = createSimpleVar(requiredParameter.paramName(),
            requiredParameter.typeName(), requiredParameter.annotations());
    param.flagSet.add(Flag.REQUIRED_PARAM);
    requiredParameter.paramName().ifPresent(name -> param.name.pos = getPosition(name));
    param.pos = trimLeft(getPosition(requiredParameter), getPosition(requiredParameter.typeName()));
    return param;
}
@Override
public BLangNode transform(IncludedRecordParameterNode includedRecordParameterNode) {
    // An included-record parameter; its position is trimmed to start at the type name.
    BLangSimpleVariable param = createSimpleVar(includedRecordParameterNode.paramName(),
            includedRecordParameterNode.typeName(), includedRecordParameterNode.annotations());
    param.flagSet.add(INCLUDED);
    includedRecordParameterNode.paramName().ifPresent(name -> param.name.pos = getPosition(name));
    param.pos = trimLeft(getPosition(includedRecordParameterNode),
            getPosition(includedRecordParameterNode.typeName()));
    return param;
}
@Override
public BLangNode transform(DefaultableParameterNode defaultableParameter) {
    // A parameter with a default value expression.
    BLangSimpleVariable param = createSimpleVar(defaultableParameter.paramName(),
            defaultableParameter.typeName(), defaultableParameter.annotations());
    param.setInitialExpression(createExpression(defaultableParameter.expression()));
    param.flagSet.add(Flag.DEFAULTABLE_PARAM);
    param.pos = getPosition(defaultableParameter);
    return param;
}
@Override
public BLangNode transform(RestParameterNode restParameter) {
    // A rest parameter is modeled as a parameter whose type is a one-dimensional
    // array of the declared element type.
    BLangSimpleVariable restVar = createSimpleVar(restParameter.paramName(), restParameter.typeName(),
            restParameter.annotations());
    BLangArrayType arrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    arrayType.elemtype = restVar.typeNode;
    arrayType.dimensions = 1;
    arrayType.pos = getPosition(restParameter.typeName());
    restVar.typeNode = arrayType;
    restVar.flagSet.add(Flag.REST_PARAM);
    restVar.pos = getPosition(restParameter);
    return restVar;
}
@Override
public BLangNode transform(OptionalTypeDescriptorNode optTypeDescriptor) {
    // An optional type is represented as the union of the base type and nil.
    BLangValueType nilType = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilType.typeKind = TypeKind.NIL;
    nilType.pos = getPosition(optTypeDescriptor.questionMarkToken());
    BLangUnionTypeNode union = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    union.memberTypeNodes.add(createTypeNode(optTypeDescriptor.typeDescriptor()));
    union.memberTypeNodes.add(nilType);
    union.nullable = true;
    union.pos = getPosition(optTypeDescriptor);
    return union;
}
@Override
public BLangNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) {
    // Builds a function type node from a function type descriptor: parameters,
    // return type, and isolated/transactional qualifiers.
    BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) TreeBuilder.createFunctionTypeNode();
    functionTypeNode.pos = getPosition(functionTypeDescriptorNode);
    // NOTE(review): set unconditionally, even when no return type descriptor
    // is written in source — confirm this is intended.
    functionTypeNode.returnsKeywordExists = true;
    if (functionTypeDescriptorNode.functionSignature().isPresent()) {
        FunctionSignatureNode funcSignature = functionTypeDescriptorNode.functionSignature().get();
        // Split parameters into the rest parameter and ordinary parameters.
        for (ParameterNode child : funcSignature.parameters()) {
            SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
            if (child.kind() == SyntaxKind.REST_PARAM) {
                functionTypeNode.restParam = (BLangSimpleVariable) param;
            } else {
                functionTypeNode.params.add((BLangVariable) param);
            }
        }
        Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
        if (retNode.isPresent()) {
            ReturnTypeDescriptorNode returnType = retNode.get();
            functionTypeNode.returnTypeNode = createTypeNode(returnType.type());
        } else {
            // No declared return type: default to nil.
            BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            bLValueType.pos = getPosition(funcSignature);
            bLValueType.typeKind = TypeKind.NIL;
            functionTypeNode.returnTypeNode = bLValueType;
        }
    } else {
        // No signature at all denotes the "any function" type.
        functionTypeNode.flagSet.add(Flag.ANY_FUNCTION);
    }
    // NOTE(review): PUBLIC is added unconditionally here — confirm intended.
    functionTypeNode.flagSet.add(Flag.PUBLIC);
    for (Token token : functionTypeDescriptorNode.qualifierList()) {
        if (token.kind() == SyntaxKind.ISOLATED_KEYWORD) {
            functionTypeNode.flagSet.add(Flag.ISOLATED);
        } else if (token.kind() == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            functionTypeNode.flagSet.add(Flag.TRANSACTIONAL);
        }
    }
    return functionTypeNode;
}
@Override
public BLangNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) {
    // A built-in parameterized type: a constrained type node wrapping a fresh
    // built-in ref type whose kind/position come from the parameterized keyword.
    BLangBuiltInRefTypeNode builtInType =
            (BLangBuiltInRefTypeNode) createBuiltInTypeNode(parameterizedTypeDescNode.parameterizedType());
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = builtInType.typeKind;
    refType.pos = builtInType.pos;
    BLangConstrainedType constrained = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
    constrained.type = refType;
    constrained.constraint = createTypeNode(parameterizedTypeDescNode.typeParameter().typeNode());
    constrained.pos = getPosition(parameterizedTypeDescNode);
    return constrained;
}
@Override
public BLangNode transform(KeySpecifierNode keySpecifierNode) {
    // Collects the key field names of a table key specifier.
    BLangTableKeySpecifier keySpecifier =
            (BLangTableKeySpecifier) TreeBuilder.createTableKeySpecifierNode();
    keySpecifier.pos = getPosition(keySpecifierNode);
    for (Token fieldName : keySpecifierNode.fieldNames()) {
        keySpecifier.addFieldNameIdentifier(createIdentifier(fieldName));
    }
    return keySpecifier;
}
@Override
public BLangNode transform(KeyTypeConstraintNode keyTypeConstraintNode) {
    // Key type constraint of a table type.
    BLangTableKeyTypeConstraint constraint = new BLangTableKeyTypeConstraint();
    constraint.pos = getPosition(keyTypeConstraintNode);
    constraint.keyType = createTypeNode(keyTypeConstraintNode.typeParameterNode());
    return constraint;
}
@Override
public BLangNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) {
    // Table type: row constraint plus an optional key specifier or key type
    // constraint. The built-in ref kind is derived from the table keyword text.
    Location pos = getPosition(tableTypeDescriptorNode);
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TreeUtils.stringToTypeKind(tableTypeDescriptorNode.tableKeywordToken().text());
    refType.pos = pos;
    BLangTableTypeNode tableType = (BLangTableTypeNode) TreeBuilder.createTableTypeNode();
    tableType.pos = pos;
    tableType.type = refType;
    tableType.constraint = createTypeNode(tableTypeDescriptorNode.rowTypeParameterNode());
    tableTypeDescriptorNode.keyConstraintNode().ifPresent(constraint -> {
        if (constraint.kind() == SyntaxKind.KEY_TYPE_CONSTRAINT) {
            tableType.tableKeyTypeConstraint = (BLangTableKeyTypeConstraint) constraint.apply(this);
        } else if (constraint.kind() == SyntaxKind.KEY_SPECIFIER) {
            tableType.tableKeySpecifier = (BLangTableKeySpecifier) constraint.apply(this);
        }
    });
    tableType.isTypeInlineDefined = checkIfAnonymous(tableTypeDescriptorNode);
    return tableType;
}
@Override
public BLangNode transform(SimpleNameReferenceNode simpleNameRefNode) {
    // A bare name reference becomes a user-defined type with an empty
    // (default) package alias.
    BLangUserDefinedType userDefinedType = new BLangUserDefinedType();
    userDefinedType.pos = getPosition(simpleNameRefNode);
    userDefinedType.typeName = createIdentifier(simpleNameRefNode.name());
    userDefinedType.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    return userDefinedType;
}
@Override
public BLangNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
    // A qualified reference: the module prefix becomes the package alias of
    // a simple var-ref.
    BLangSimpleVarRef nameRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    nameRef.pos = getPosition(qualifiedNameReferenceNode);
    nameRef.pkgAlias = createIdentifier(qualifiedNameReferenceNode.modulePrefix());
    nameRef.variableName = createIdentifier(qualifiedNameReferenceNode.identifier());
    return nameRef;
}
@Override
// Transforms `<?target data?>` into an XML processing-instruction literal.
public BLangNode transform(XMLProcessingInstruction xmlProcessingInstruction) {
BLangXMLProcInsLiteral xmlProcInsLiteral =
(BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode();
// An empty data section still contributes one empty-string fragment so the
// literal always has at least one data fragment downstream.
if (xmlProcessingInstruction.data().isEmpty()) {
BLangLiteral emptyLiteral = createEmptyLiteral();
emptyLiteral.pos = getPosition(xmlProcessingInstruction);
xmlProcInsLiteral.dataFragments.add(emptyLiteral);
} else {
for (Node dataNode : xmlProcessingInstruction.data()) {
xmlProcInsLiteral.dataFragments.add(createExpression(dataNode));
}
}
// Target may be a simple or qualified XML name; for qualified names only the
// prefix is used as the target literal.
XMLNameNode target = xmlProcessingInstruction.target();
if (target.kind() == SyntaxKind.XML_SIMPLE_NAME) {
xmlProcInsLiteral.target = createSimpleLiteral(((XMLSimpleNameNode) target).name());
} else {
xmlProcInsLiteral.target = createSimpleLiteral(((XMLQualifiedNameNode) target).prefix());
}
xmlProcInsLiteral.pos = getPosition(xmlProcessingInstruction);
return xmlProcInsLiteral;
}
@Override
public BLangNode transform(XMLComment xmlComment) {
    // `<!-- ... -->` becomes an XML comment literal; an empty comment still gets
    // a single empty-string fragment so the node always carries content.
    BLangXMLCommentLiteral commentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode();
    Location commentPos = getPosition(xmlComment);
    if (xmlComment.content().isEmpty()) {
        BLangLiteral emptyFragment = createEmptyLiteral();
        emptyFragment.pos = commentPos;
        commentLiteral.textFragments.add(emptyFragment);
    } else {
        for (Node contentNode : xmlComment.content()) {
            commentLiteral.textFragments.add(createExpression(contentNode));
        }
    }
    commentLiteral.pos = commentPos;
    return commentLiteral;
}
@Override
// Transforms an XML element `<a ...>children</a>` into an element literal:
// start/end tag names, child content, and start-tag attributes.
public BLangNode transform(XMLElementNode xmlElementNode) {
BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
xmlElement.startTagName = createExpression(xmlElementNode.startTag());
xmlElement.endTagName = createExpression(xmlElementNode.endTag());
// Plain text children become simple literals; everything else goes through
// the generic expression builder.
for (Node node : xmlElementNode.content()) {
if (node.kind() == SyntaxKind.XML_TEXT) {
xmlElement.children.add(createSimpleLiteral(((XMLTextNode) node).content()));
continue;
}
xmlElement.children.add(createExpression(node));
}
// Attributes live on the start tag in the syntax tree.
for (XMLAttributeNode attribute : xmlElementNode.startTag().attributes()) {
xmlElement.attributes.add((BLangXMLAttribute) attribute.apply(this));
}
xmlElement.pos = getPosition(xmlElementNode);
// NOTE(review): every element built here is flagged as root; nesting handling
// presumably happens in a later phase — confirm.
xmlElement.isRoot = true;
return xmlElement;
}
@Override
public BLangNode transform(XMLAttributeNode xmlAttributeNode) {
    // name="value" attribute: value is a quoted-string node, name a generic expression.
    BLangXMLAttribute attribute = (BLangXMLAttribute) TreeBuilder.createXMLAttributeNode();
    attribute.name = createExpression(xmlAttributeNode.attributeName());
    attribute.value = (BLangXMLQuotedString) xmlAttributeNode.value().apply(this);
    attribute.pos = getPosition(xmlAttributeNode);
    return attribute;
}
@Override
public BLangNode transform(ByteArrayLiteralNode byteArrayLiteralNode) {
    // base16/base64 literal: a literal expression typed as BYTE_ARRAY.
    BLangLiteral byteArrayLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    byteArrayLiteral.pos = getPosition(byteArrayLiteralNode);
    byteArrayLiteral.type = symTable.getTypeFromTag(TypeTags.BYTE_ARRAY);
    byteArrayLiteral.type.tag = TypeTags.BYTE_ARRAY;
    byteArrayLiteral.value = getValueFromByteArrayNode(byteArrayLiteralNode);
    byteArrayLiteral.originalValue = String.valueOf(byteArrayLiteral.value);
    return byteArrayLiteral;
}
@Override
// Transforms an XML attribute value into a quoted-string node, recording the
// quote style and splitting the value into text/interpolation fragments.
public BLangNode transform(XMLAttributeValue xmlAttributeValue) {
BLangXMLQuotedString quotedString = (BLangXMLQuotedString) TreeBuilder.createXMLQuotedStringNode();
quotedString.pos = getPosition(xmlAttributeValue);
if (xmlAttributeValue.startQuote().kind() == SyntaxKind.SINGLE_QUOTE_TOKEN) {
quotedString.quoteType = QuoteType.SINGLE_QUOTE;
} else {
quotedString.quoteType = QuoteType.DOUBLE_QUOTE;
}
// Empty value: one empty-string fragment so the node is never fragment-less.
if (xmlAttributeValue.value().isEmpty()) {
BLangLiteral emptyLiteral = createEmptyLiteral();
emptyLiteral.pos = getPosition(xmlAttributeValue);
quotedString.textFragments.add(emptyLiteral);
// A lone interpolation (e.g. attr="${x}") gets a trailing empty literal
// appended — presumably so later phases always see a closing text fragment;
// TODO(review): confirm why only the single-interpolation case needs this.
} else if (xmlAttributeValue.value().size() == 1 &&
xmlAttributeValue.value().get(0).kind() == SyntaxKind.INTERPOLATION) {
quotedString.textFragments.add(createExpression(xmlAttributeValue.value().get(0)));
BLangLiteral emptyLiteral = createEmptyLiteral();
emptyLiteral.pos = getPosition(xmlAttributeValue);
quotedString.textFragments.add(emptyLiteral);
} else {
// General case: each value part (text or interpolation) becomes a fragment.
for (Node value : xmlAttributeValue.value()) {
quotedString.textFragments.add(createExpression(value));
}
}
return quotedString;
}
@Override
public BLangNode transform(XMLStartTagNode startTagNode) {
    // A start tag contributes only its (possibly qualified) name node.
    return startTagNode.name().apply(this);
}
@Override
public BLangNode transform(XMLEndTagNode endTagNode) {
    // An end tag contributes only its (possibly qualified) name node.
    return endTagNode.name().apply(this);
}
@Override
public BLangNode transform(XMLTextNode xmlTextNode) {
    // XML character data: delegate straight to the expression builder.
    return createExpression(xmlTextNode.content());
}
private BLangNode createXMLLiteral(TemplateExpressionNode expressionNode) {
    // Builds an `xml` template literal; with no content the literal still gets a
    // single empty-string fragment positioned at the template itself.
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    if (expressionNode.content().isEmpty()) {
        textLiteral.pos = getPosition(expressionNode);
        textLiteral.textFragments.add(createEmptyStringLiteral(textLiteral.pos));
        return textLiteral;
    }
    // Non-empty template: position comes from the first content node.
    textLiteral.pos = getPosition(expressionNode.content().get(0));
    for (Node contentNode : expressionNode.content()) {
        textLiteral.textFragments.add(createExpression(contentNode));
    }
    return textLiteral;
}
@Override
public BLangNode transform(XMLNamespaceDeclarationNode xmlnsDeclNode) {
    // Local `xmlns "uri" as prefix;` declaration — wrapped in an XMLNS statement.
    BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    xmlns.prefix = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null));
    xmlns.namespaceURI = createExpression(xmlnsDeclNode.namespaceuri());
    xmlns.pos = getPosition(xmlnsDeclNode);

    BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    xmlnsStmt.xmlnsDecl = xmlns;
    xmlnsStmt.pos = getPosition(xmlnsDeclNode);
    return xmlnsStmt;
}
@Override
public BLangNode transform(ModuleXMLNamespaceDeclarationNode xmlnsDeclNode) {
    // Module-level `xmlns` declaration — no statement wrapper, the XMLNS node
    // itself is the top-level construct.
    BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    xmlns.prefix = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null));
    xmlns.namespaceURI = createExpression(xmlnsDeclNode.namespaceuri());
    xmlns.pos = getPosition(xmlnsDeclNode);
    return xmlns;
}
@Override
public BLangNode transform(XMLQualifiedNameNode xmlQualifiedNameNode) {
    // `prefix:local` XML name — both parts keep their own source positions.
    BLangXMLQName qName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    qName.localname = createIdentifier(getPosition(xmlQualifiedNameNode.name()),
            xmlQualifiedNameNode.name().name());
    qName.prefix = createIdentifier(getPosition(xmlQualifiedNameNode.prefix()),
            xmlQualifiedNameNode.prefix().name());
    qName.pos = getPosition(xmlQualifiedNameNode);
    return qName;
}
@Override
public BLangNode transform(XMLSimpleNameNode xmlSimpleNameNode) {
    // Unprefixed XML name — the prefix identifier is present but empty.
    BLangXMLQName qName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    qName.localname = createIdentifier(xmlSimpleNameNode.name());
    qName.prefix = createIdentifier(null, "");
    qName.pos = getPosition(xmlSimpleNameNode);
    return qName;
}
@Override
public BLangNode transform(XMLEmptyElementNode xMLEmptyElementNode) {
    // Self-closing element `<a .../>`: only a start-tag name and attributes.
    BLangXMLElementLiteral emptyElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    emptyElement.startTagName = createExpression(xMLEmptyElementNode.name());
    for (XMLAttributeNode attributeNode : xMLEmptyElementNode.attributes()) {
        emptyElement.attributes.add((BLangXMLAttribute) attributeNode.apply(this));
    }
    emptyElement.pos = getPosition(xMLEmptyElementNode);
    return emptyElement;
}
@Override
public BLangNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) {
    // `expr->method(args)` remote call: expression, arguments, then the resolved
    // method name split into name + package alias.
    BLangInvocation.BLangActionInvocation actionInvocation =
            (BLangInvocation.BLangActionInvocation) TreeBuilder.createActionInvocation();
    actionInvocation.expr = createExpression(remoteMethodCallActionNode.expression());
    actionInvocation.argExprs = applyAll(remoteMethodCallActionNode.arguments());

    BLangNameReference methodRef = createBLangNameReference(remoteMethodCallActionNode.methodName().name());
    actionInvocation.name = (BLangIdentifier) methodRef.name;
    actionInvocation.pkgAlias = (BLangIdentifier) methodRef.pkgAlias;
    actionInvocation.pos = getPosition(remoteMethodCallActionNode);
    return actionInvocation;
}
@Override
public BLangNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) {
    // `stream<C, E>` type: constraint defaults to `any` when no type parameters
    // are written; the optional right parameter is the completion/error type.
    Location pos = getPosition(streamTypeDescriptorNode);
    BLangType constraint;
    BLangType error = null;
    Optional<Node> paramsNode = streamTypeDescriptorNode.streamTypeParamsNode();
    if (paramsNode.isPresent()) {
        StreamTypeParamsNode params = (StreamTypeParamsNode) paramsNode.get();
        if (params.rightTypeDescNode().isPresent()) {
            error = createTypeNode(params.rightTypeDescNode().get());
        }
        constraint = createTypeNode(params.leftTypeDescNode());
    } else {
        constraint = addValueType(pos, TypeKind.ANY);
    }

    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TypeKind.STREAM;
    refType.pos = pos;

    BLangStreamType streamType = (BLangStreamType) TreeBuilder.createStreamTypeNode();
    streamType.type = refType;
    streamType.constraint = constraint;
    streamType.error = error;
    streamType.pos = pos;
    return streamType;
}
@Override
// Flattens a (possibly nested) array type descriptor `T[a][b]...` into a single
// BLangArrayType, collecting one size expression per dimension.
public BLangNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) {
int dimensions = 1;
List<BLangExpression> sizes = new ArrayList<>();
Location position = getPosition(arrayTypeDescriptorNode);
// Walk inward through nested array descriptors; each iteration records the
// size of one dimension.
while (true) {
if (!arrayTypeDescriptorNode.arrayLength().isPresent()) {
// `T[]` — open (unsized) dimension, marked by the sentinel constant.
sizes.add(new BLangLiteral(Integer.valueOf(OPEN_ARRAY_INDICATOR), symTable.intType));
} else {
Node keyExpr = arrayTypeDescriptorNode.arrayLength().get();
if (keyExpr.kind() == SyntaxKind.NUMERIC_LITERAL) {
BasicLiteralNode numericLiteralNode = (BasicLiteralNode) keyExpr;
if (numericLiteralNode.literalToken().kind() == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) {
sizes.add(new BLangLiteral(Integer.parseInt(keyExpr.toString()), symTable.intType));
} else {
// Non-decimal numeric literal parsed as hex.
// NOTE(review): Integer.parseInt(s, 16) rejects a "0x" prefix and any
// surrounding trivia in keyExpr.toString() — confirm the token text here
// is bare hex digits, otherwise this throws NumberFormatException.
sizes.add(new BLangLiteral(Integer.parseInt(keyExpr.toString(), 16), symTable.intType));
}
} else if (keyExpr.kind() == SyntaxKind.ASTERISK_LITERAL) {
// `T[*]` — length inferred from the initializer, marked by a sentinel.
sizes.add(new BLangLiteral(Integer.valueOf(INFERRED_ARRAY_INDICATOR), symTable.intType));
} else {
// Constant-reference length, e.g. `T[N]` where N is a named constant.
sizes.add(createExpression(keyExpr));
}
}
if (arrayTypeDescriptorNode.memberTypeDesc().kind() != SyntaxKind.ARRAY_TYPE_DESC) {
break;
}
arrayTypeDescriptorNode = (ArrayTypeDescriptorNode) arrayTypeDescriptorNode.memberTypeDesc();
dimensions++;
}
BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
arrayTypeNode.pos = position;
// After the loop, memberTypeDesc is the innermost (non-array) element type.
arrayTypeNode.elemtype = createTypeNode(arrayTypeDescriptorNode.memberTypeDesc());
arrayTypeNode.dimensions = dimensions;
arrayTypeNode.sizes = sizes.toArray(new BLangExpression[0]);
return arrayTypeNode;
}
// Desugars `enum E { A, B, ... }` into:
//   1. one module-level constant per member (added via transformEnumMember), and
//   2. a type definition `E` whose type node is the union of the member names.
// The ENUM flag marks the type definition so later phases can recognize it.
public BLangNode transform(EnumDeclarationNode enumDeclarationNode) {
    // Use a primitive boolean for the visibility flag (was a boxed Boolean).
    boolean publicQualifier = enumDeclarationNode.qualifier().isPresent()
            && enumDeclarationNode.qualifier().get().kind() == SyntaxKind.PUBLIC_KEYWORD;

    // Each member becomes a top-level constant carrying the enum's visibility.
    for (Node member : enumDeclarationNode.enumMemberList()) {
        addToTop(transformEnumMember((EnumMemberNode) member, publicQualifier));
    }

    BLangTypeDefinition bLangTypeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    if (publicQualifier) {
        bLangTypeDefinition.flagSet.add(Flag.PUBLIC);
    }
    bLangTypeDefinition.flagSet.add(Flag.ENUM);
    bLangTypeDefinition.setName((BLangIdentifier) transform(enumDeclarationNode.identifier()));
    bLangTypeDefinition.pos = getPosition(enumDeclarationNode);

    // The enum type itself is the union of its member identifiers.
    BLangUnionTypeNode bLangUnionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    bLangUnionTypeNode.pos = bLangTypeDefinition.pos;
    for (Node member : enumDeclarationNode.enumMemberList()) {
        bLangUnionTypeNode.memberTypeNodes.add(createTypeNode(((EnumMemberNode) member).identifier()));
    }
    // NOTE(review): members are deliberately reversed before being attached —
    // confirm which ordering downstream phases expect before changing this.
    Collections.reverse(bLangUnionTypeNode.memberTypeNodes);
    bLangTypeDefinition.setTypeNode(bLangUnionTypeNode);

    bLangTypeDefinition.annAttachments = applyAll(getAnnotations(enumDeclarationNode.metadata()));
    bLangTypeDefinition.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(enumDeclarationNode.metadata()));
    return bLangTypeDefinition;
}
// Turns a single enum member into a module-level string constant. The member's
// value expression (explicit, or the member name itself) is materialized twice:
// once as the constant's initializer and once ("deepLiteral") for the associated
// finite type, so the two AST nodes are not shared.
public BLangConstant transformEnumMember(EnumMemberNode member, Boolean publicQualifier) {
    BLangConstant bLangConstant = (BLangConstant) TreeBuilder.createConstantNode();
    bLangConstant.pos = getPosition(member);
    bLangConstant.flagSet.add(Flag.CONSTANT);
    if (publicQualifier) {
        bLangConstant.flagSet.add(Flag.PUBLIC);
    }

    bLangConstant.annAttachments = applyAll(getAnnotations(member.metadata()));
    bLangConstant.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(member.metadata()));
    bLangConstant.setName((BLangIdentifier) transform(member.identifier()));

    BLangExpression deepLiteral;
    if (member.constExprNode().isPresent()) {
        BLangExpression expression = createExpression(member.constExprNode().orElse(null));
        bLangConstant.setInitialExpression(expression);
        deepLiteral = createExpression(member.constExprNode().orElse(null));
    } else {
        BLangLiteral literal = createSimpleLiteral(member.identifier());
        bLangConstant.setInitialExpression(literal);
        deepLiteral = createSimpleLiteral(member.identifier());
    }

    // Enum members are always typed as string.
    BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    typeNode.typeKind = TypeKind.STRING;
    bLangConstant.setTypeNode(typeNode);

    if (deepLiteral instanceof BLangLiteral) {
        BLangLiteral literal = (BLangLiteral) deepLiteral;
        // BUGFIX: was `literal.originalValue != ""` — a reference comparison on
        // String that is effectively always true. Compare by content (null-safe):
        // only a literal with a non-empty original value gets a finite type.
        if (!"".equals(literal.originalValue)) {
            BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
            literal.originalValue = null;
            typeNodeAssociated.addValue(deepLiteral);
            bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
        } else {
            bLangConstant.associatedTypeDefinition = null;
        }
    } else {
        // Non-literal constant expression: always build the finite type.
        BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        typeNodeAssociated.addValue(deepLiteral);
        bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
    }
    return bLangConstant;
}
@Override
// Transforms a query expression: from-clause, intermediate clauses, select,
// optional on-conflict, plus the optional `table`/`stream` construct type.
public BLangNode transform(QueryExpressionNode queryExprNode) {
BLangQueryExpr queryExpr = (BLangQueryExpr) TreeBuilder.createQueryExpressionNode();
queryExpr.pos = getPosition(queryExprNode);
// Clause order in queryClauseList mirrors source order: from, intermediates,
// select, then on-conflict (if any).
BLangFromClause fromClause = (BLangFromClause) queryExprNode.queryPipeline().fromClause().apply(this);
queryExpr.queryClauseList.add(fromClause);
for (Node clauseNode : queryExprNode.queryPipeline().intermediateClauses()) {
queryExpr.queryClauseList.add(clauseNode.apply(this));
}
BLangSelectClause selectClause = (BLangSelectClause) queryExprNode.selectClause().apply(this);
queryExpr.queryClauseList.add(selectClause);
Optional<OnConflictClauseNode> onConflict = queryExprNode.onConflictClause();
onConflict.ifPresent(onConflictClauseNode -> queryExpr.queryClauseList.add(onConflictClauseNode.apply(this)));
// `table ... from ...` / `stream from ...` construct type; a table construct
// may also carry a key specifier whose field names are recorded on the expr.
boolean isTable = false;
boolean isStream = false;
Optional<QueryConstructTypeNode> optionalQueryConstructTypeNode = queryExprNode.queryConstructType();
if (optionalQueryConstructTypeNode.isPresent()) {
QueryConstructTypeNode queryConstructTypeNode = optionalQueryConstructTypeNode.get();
isTable = queryConstructTypeNode.keyword().kind() == SyntaxKind.TABLE_KEYWORD;
isStream = queryConstructTypeNode.keyword().kind() == SyntaxKind.STREAM_KEYWORD;
if (queryConstructTypeNode.keySpecifier().isPresent()) {
for (IdentifierToken fieldNameNode : queryConstructTypeNode.keySpecifier().get().fieldNames()) {
queryExpr.fieldNameIdentifierList.add(createIdentifier(getPosition(fieldNameNode), fieldNameNode));
}
}
}
queryExpr.isStream = isStream;
queryExpr.isTable = isTable;
return queryExpr;
}
// Transforms an `on fail <type> <var> { ... }` clause. The error-binding
// variable is wrapped in a variable definition and forced FINAL.
public BLangNode transform(OnFailClauseNode onFailClauseNode) {
Location pos = getPosition(onFailClauseNode);
BLangSimpleVariableDef variableDefinitionNode = (BLangSimpleVariableDef) TreeBuilder.
createSimpleVariableDefinitionNode();
BLangSimpleVariable var = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
// `on fail var e` declares with var; otherwise the written type is attached.
boolean isDeclaredWithVar = onFailClauseNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
var.isDeclaredWithVar = isDeclaredWithVar;
if (!isDeclaredWithVar) {
var.setTypeNode(createTypeNode(onFailClauseNode.typeDescriptor()));
}
var.pos = getPosition(onFailClauseNode);
var.setName(this.createIdentifier(onFailClauseNode.failErrorName()));
var.name.pos = getPosition(onFailClauseNode.failErrorName());
variableDefinitionNode.setVariable(var);
// The definition's position is narrowed to the variable name, not the clause.
variableDefinitionNode.pos = var.name.pos;
BLangOnFailClause onFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
onFailClause.pos = pos;
onFailClause.isDeclaredWithVar = isDeclaredWithVar;
// The error variable is implicitly final inside the on-fail body.
markVariableWithFlag(variableDefinitionNode.getVariable(), Flag.FINAL);
onFailClause.variableDefinitionNode = variableDefinitionNode;
BLangBlockStmt blockNode = (BLangBlockStmt) transform(onFailClauseNode.blockStatement());
blockNode.pos = getPosition(onFailClauseNode);
onFailClause.body = blockNode;
return onFailClause;
}
@Override
public BLangNode transform(LetClauseNode letClauseNode) {
    // `let T x = e, ...` clause of a query: each declaration becomes a FINAL
    // let-variable; the list is only attached when non-empty.
    BLangLetClause bLLetClause = (BLangLetClause) TreeBuilder.createLetClauseNode();
    bLLetClause.pos = getPosition(letClauseNode);

    List<BLangLetVariable> letVars = new ArrayList<>();
    for (LetVariableDeclarationNode declaration : letClauseNode.letVarDeclarations()) {
        BLangLetVariable letVariable = createLetVariable(declaration);
        letVariable.definitionNode.getVariable().addFlag(Flag.FINAL);
        letVars.add(letVariable);
    }
    if (!letVars.isEmpty()) {
        bLLetClause.letVarDeclarations = letVars;
    }
    return bLLetClause;
}
@Override
public BLangNode transform(FromClauseNode fromClauseNode) {
    // `from <binding> in <collection>` — the binding pattern becomes a var def.
    BLangFromClause fromClause = (BLangFromClause) TreeBuilder.createFromClauseNode();
    fromClause.pos = getPosition(fromClauseNode);
    fromClause.collection = createExpression(fromClauseNode.expression());

    TypedBindingPatternNode bindingPatternNode = fromClauseNode.typedBindingPattern();
    fromClause.variableDefinitionNode = createBLangVarDef(getPosition(bindingPatternNode), bindingPatternNode,
            Optional.empty(), Optional.empty());
    fromClause.isDeclaredWithVar = bindingPatternNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    return fromClause;
}
@Override
public BLangNode transform(WhereClauseNode whereClauseNode) {
    // `where <expr>` filter clause of a query pipeline.
    BLangWhereClause bLWhereClause = (BLangWhereClause) TreeBuilder.createWhereClauseNode();
    bLWhereClause.expression = createExpression(whereClauseNode.expression());
    bLWhereClause.pos = getPosition(whereClauseNode);
    return bLWhereClause;
}
@Override
public BLangNode transform(SelectClauseNode selectClauseNode) {
    // `select <expr>` projection clause of a query pipeline.
    BLangSelectClause bLSelectClause = (BLangSelectClause) TreeBuilder.createSelectClauseNode();
    bLSelectClause.expression = createExpression(selectClauseNode.expression());
    bLSelectClause.pos = getPosition(selectClauseNode);
    return bLSelectClause;
}
@Override
public BLangNode transform(OnConflictClauseNode onConflictClauseNode) {
    // `on conflict <expr>` clause of a table-constructing query.
    BLangOnConflictClause bLOnConflict = (BLangOnConflictClause) TreeBuilder.createOnConflictClauseNode();
    bLOnConflict.expression = createExpression(onConflictClauseNode.expression());
    bLOnConflict.pos = getPosition(onConflictClauseNode);
    return bLOnConflict;
}
@Override
public BLangNode transform(LimitClauseNode limitClauseNode) {
    // `limit <expr>` clause of a query pipeline.
    BLangLimitClause bLLimitClause = (BLangLimitClause) TreeBuilder.createLimitClauseNode();
    bLLimitClause.expression = createExpression(limitClauseNode.expression());
    bLLimitClause.pos = getPosition(limitClauseNode);
    return bLLimitClause;
}
@Override
public BLangNode transform(OnClauseNode onClauseNode) {
    // `on <lhs> equals <rhs>` join condition.
    BLangOnClause bLOnClause = (BLangOnClause) TreeBuilder.createOnClauseNode();
    bLOnClause.pos = getPosition(onClauseNode);
    bLOnClause.lhsExpr = createExpression(onClauseNode.lhsExpression());
    bLOnClause.rhsExpr = createExpression(onClauseNode.rhsExpression());
    return bLOnClause;
}
@Override
// Transforms a `[outer] join <binding> in <expr> on <lhs> equals <rhs>` clause.
public BLangNode transform(JoinClauseNode joinClauseNode) {
BLangJoinClause joinClause = (BLangJoinClause) TreeBuilder.createJoinClauseNode();
joinClause.pos = getPosition(joinClauseNode);
TypedBindingPatternNode typedBindingPattern = joinClauseNode.typedBindingPattern();
joinClause.variableDefinitionNode = createBLangVarDef(getPosition(joinClauseNode),
typedBindingPattern, Optional.empty(), Optional.empty());
joinClause.collection = createExpression(joinClauseNode.expression());
joinClause.isDeclaredWithVar = typedBindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
joinClause.isOuterJoin = joinClauseNode.outerKeyword().isPresent();
// The on-condition is built inline here (not via transform(OnClauseNode)) so
// the `equals` keyword position can be recorded when the token is present.
OnClauseNode onClauseNode = joinClauseNode.joinOnCondition();
BLangOnClause onClause = (BLangOnClause) TreeBuilder.createOnClauseNode();
onClause.pos = getPosition(onClauseNode);
if (!onClauseNode.equalsKeyword().isMissing()) {
onClause.equalsKeywordPos = getPosition(onClauseNode.equalsKeyword());
}
onClause.lhsExpr = createExpression(onClauseNode.lhsExpression());
onClause.rhsExpr = createExpression(onClauseNode.rhsExpression());
joinClause.onClause = onClause;
return joinClause;
}
@Override
public BLangNode transform(OrderByClauseNode orderByClauseNode) {
    // `order by k1 [direction], k2 ...` — one order key per comma-separated entry.
    BLangOrderByClause bLOrderBy = (BLangOrderByClause) TreeBuilder.createOrderByClauseNode();
    bLOrderBy.pos = getPosition(orderByClauseNode);
    for (OrderKeyNode keyNode : orderByClauseNode.orderKey()) {
        bLOrderBy.addOrderKey(createOrderKey(keyNode));
    }
    return bLOrderBy;
}
public BLangOrderKey createOrderKey(OrderKeyNode orderKeyNode) {
    // One key of an `order by` clause; ascending unless `descending` is written.
    BLangOrderKey orderKey = (BLangOrderKey) TreeBuilder.createOrderKeyNode();
    orderKey.pos = getPosition(orderKeyNode);
    orderKey.expression = createExpression(orderKeyNode.expression());
    boolean descending = orderKeyNode.orderDirection().isPresent()
            && orderKeyNode.orderDirection().get().text().equals("descending");
    orderKey.isAscending = !descending;
    return orderKey;
}
@Override
// Transforms `A & B`. Because intersections are parsed pairwise, an operand may
// itself already be an intersection node; in that case the other operand is
// merged into it instead of nesting, keeping the constituent list flat.
public BLangNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) {
BLangType lhsType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.leftTypeDesc());
BLangType rhsType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.rightTypeDesc());
BLangIntersectionTypeNode intersectionType;
if (rhsType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
// rhs is already an intersection: prepend lhs to preserve source order.
intersectionType = (BLangIntersectionTypeNode) rhsType;
intersectionType.constituentTypeNodes.add(0, lhsType);
} else if (lhsType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
// lhs is already an intersection: append rhs.
intersectionType = (BLangIntersectionTypeNode) lhsType;
intersectionType.constituentTypeNodes.add(rhsType);
} else {
// Neither side is an intersection yet: start a fresh node with both.
intersectionType = (BLangIntersectionTypeNode) TreeBuilder.createIntersectionTypeNode();
intersectionType.constituentTypeNodes.add(lhsType);
intersectionType.constituentTypeNodes.add(rhsType);
}
intersectionType.pos = getPosition(intersectionTypeDescriptorNode);
return intersectionType;
}
@Override
protected BLangNode transformSyntaxNode(Node node) {
    // Fallback for syntax kinds without a dedicated transform overload.
    throw new RuntimeException("Node not supported: " + node.getClass().getSimpleName());
}
@Override
// Desugars a service declaration into: (1) an anonymous class definition holding
// the service members, (2) an instantiation of that class bound to a synthetic
// service variable, and (3) a BLangService tying it all together with the
// absolute resource path / service-name literal and attach expressions.
public BLangNode transform(ServiceDeclarationNode serviceDeclarationNode) {
Location pos = getPositionWithoutMetadata(serviceDeclarationNode);
BLangClassDefinition annonClassDef = transformObjectCtorExpressionBody(serviceDeclarationNode.members());
annonClassDef.isServiceDecl = true;
annonClassDef.pos = pos;
annonClassDef.flagSet.add(SERVICE);
// The attach point is either a single string literal (`service "name" on ...`)
// or a sequence of path segments (`service /foo/bar on ...`).
List<IdentifierNode> absResourcePathPath = new ArrayList<>();
NodeList<Node> pathList = serviceDeclarationNode.absoluteResourcePath();
BLangLiteral serviceNameLiteral = null;
if (pathList.size() == 1 && pathList.get(0).kind() == SyntaxKind.STRING_LITERAL) {
serviceNameLiteral = (BLangLiteral) createExpression(pathList.get(0));
} else {
for (var token : pathList) {
String text = ((Token) token).text();
// A lone "/" is kept as a path segment; otherwise "/" separators are dropped.
if (pathList.size() == 1 && text.equals("/")) {
absResourcePathPath.add(createIdentifier((Token) token));
} else if (!text.equals("/")) {
absResourcePathPath.add(createIdentifier((Token) token));
}
}
}
// The anonymous class gets a generated name and PUBLIC visibility.
String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
annonClassDef.setName(anonTypeGenName);
annonClassDef.flagSet.add(Flag.PUBLIC);
// Optional `service object {...}` type descriptor becomes a type reference.
Optional<TypeDescriptorNode> typeReference = serviceDeclarationNode.typeDescriptor();
typeReference.ifPresent(typeReferenceNode -> {
BLangType typeNode = createTypeNode(typeReferenceNode);
annonClassDef.typeRefs.add(typeNode);
});
annonClassDef.annAttachments = applyAll(getAnnotations(serviceDeclarationNode.metadata()));
annonClassDef.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(serviceDeclarationNode.metadata()));
addToTop(annonClassDef);
// Build `new <genName>()` to instantiate the generated class.
BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, annonClassDef.name);
BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
initNode.pos = pos;
initNode.userDefinedType = userDefinedType;
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = pos;
BLangIdentifier pkgAlias = createIdentifier(pos, "");
BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
invocationNode.name = (BLangIdentifier) nameReference.name;
invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
initNode.argsExpr.addAll(invocationNode.argExprs);
initNode.initInvocation = invocationNode;
// Synthetic variable holding the service instance.
BLangSimpleVariable serviceVariable = createServiceVariable(pos, annonClassDef, initNode);
// Expressions after `on` — the listeners the service attaches to.
List<BLangExpression> exprs = new ArrayList<>();
for (var exp : serviceDeclarationNode.expressions()) {
exprs.add(createExpression(exp));
}
BLangService service = (BLangService) TreeBuilder.createServiceNode();
service.serviceVariable = serviceVariable;
service.attachedExprs = exprs;
service.serviceClass = annonClassDef;
service.absoluteResourcePath = absResourcePathPath;
service.serviceNameLiteral = serviceNameLiteral;
service.annAttachments = annonClassDef.annAttachments;
service.pos = pos;
service.name = createIdentifier(pos, anonymousModelHelper.getNextAnonymousServiceVarKey(packageID));
return service;
}
private BLangSimpleVariable createServiceVariable(Location pos, BLangClassDefinition annonClassDef,
                                                  BLangTypeInit initNode) {
    // Synthesizes the hidden variable that holds the service-class instance:
    // `<anonType> $anonServiceVar = new <anonType>();`
    BLangUserDefinedType serviceType = createUserDefinedType(pos,
            (BLangIdentifier) TreeBuilder.createIdentifierNode(), annonClassDef.name);

    BLangSimpleVariable serviceVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    serviceVar.typeNode = serviceType;
    serviceVar.name = createIdentifier(pos, anonymousModelHelper.getNextAnonymousServiceVarKey(packageID));
    serviceVar.expr = initNode;
    serviceVar.internal = true;
    return serviceVar;
}
@Override
// Transforms a class definition: name, docs, visibility, class-type qualifiers
// (distinct/client/readonly/service/isolated), then sorts each member into
// functions, fields, or type references — with special handling for `init`.
public BLangNode transform(ClassDefinitionNode classDefinitionNode) {
BLangClassDefinition blangClass = (BLangClassDefinition) TreeBuilder.createClassDefNode();
blangClass.pos = getPositionWithoutMetadata(classDefinitionNode);
blangClass.annAttachments = applyAll(getAnnotations(classDefinitionNode.metadata()));
BLangIdentifier identifierNode = createIdentifier(classDefinitionNode.className());
blangClass.setName(identifierNode);
blangClass.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(classDefinitionNode.metadata()));
classDefinitionNode.visibilityQualifier().ifPresent(visibilityQual -> {
if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
blangClass.flagSet.add(Flag.PUBLIC);
}
});
// Map each class-type qualifier keyword onto the corresponding flag; an
// unrecognized qualifier indicates a parser/transformer mismatch and fails fast.
for (Token qualifier : classDefinitionNode.classTypeQualifiers()) {
SyntaxKind kind = qualifier.kind();
switch (kind) {
case DISTINCT_KEYWORD:
blangClass.flagSet.add(Flag.DISTINCT);
break;
case CLIENT_KEYWORD:
blangClass.flagSet.add(Flag.CLIENT);
break;
case READONLY_KEYWORD:
blangClass.flagSet.add(Flag.READONLY);
break;
case SERVICE_KEYWORD:
blangClass.flagSet.add(Flag.SERVICE);
break;
case ISOLATED_KEYWORD:
blangClass.flagSet.add(Flag.ISOLATED);
break;
default:
throw new RuntimeException("Syntax kind is not supported: " + kind);
}
}
NodeList<Node> members = classDefinitionNode.members();
for (Node node : members) {
BLangNode bLangNode = node.apply(this);
if (bLangNode.getKind() == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
BLangFunction bLangFunction = (BLangFunction) bLangNode;
bLangFunction.attachedFunction = true;
bLangFunction.flagSet.add(Flag.ATTACHED);
// The first `init` becomes the class init function; any duplicate `init`
// is added as an ordinary function (duplicate diagnosis happens later).
if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
if (blangClass.initFunction == null) {
bLangFunction.objInitFunction = true;
blangClass.initFunction = bLangFunction;
} else {
blangClass.addFunction(bLangFunction);
}
} else {
blangClass.addFunction(bLangFunction);
}
} else if (bLangNode.getKind() == NodeKind.VARIABLE) {
blangClass.addField((BLangSimpleVariable) bLangNode);
} else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
// `*TypeRef;` inclusion.
blangClass.addTypeReference((BLangType) bLangNode);
}
}
return blangClass;
}
@Override
public BLangNode transform(RetryStatementNode retryStatementNode) {
    // `retry [<Manager>] [(args)] { ... }` — if the body is a transaction
    // statement the whole thing becomes a retry-transaction node instead.
    BLangRetrySpec retrySpec = createRetrySpec(retryStatementNode);
    Location retryPos = getPosition(retryStatementNode);
    StatementNode retryBody = retryStatementNode.retryBody();

    if (retryBody.kind() == SyntaxKind.TRANSACTION_STATEMENT) {
        BLangRetryTransaction retryTxn = (BLangRetryTransaction) TreeBuilder.createRetryTransactionNode();
        retryTxn.pos = retryPos;
        retryTxn.setRetrySpec(retrySpec);
        retryTxn.setTransaction((BLangTransaction) retryBody.apply(this));
        return retryTxn;
    }

    BLangRetry retryNode = (BLangRetry) TreeBuilder.createRetryNode();
    retryNode.pos = retryPos;
    retryNode.setRetrySpec(retrySpec);
    retryNode.setRetryBody((BLangBlockStmt) retryBody.apply(this));
    retryStatementNode.onFailClause().ifPresent(onFailClauseNode -> retryNode.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this))));
    return retryNode;
}
private BLangRetrySpec createRetrySpec(RetryStatementNode retryStatementNode) {
    // Builds the retry spec from the optional `<Manager>` type parameter and the
    // optional `(args)` list; position falls back to the whole statement when
    // neither is present.
    BLangRetrySpec retrySpec = (BLangRetrySpec) TreeBuilder.createRetrySpecNode();
    retryStatementNode.typeParameter().ifPresent(typeParam -> {
        retrySpec.retryManagerType = createTypeNode(typeParam.typeNode());
        retrySpec.pos = getPosition(typeParam);
    });
    retryStatementNode.arguments().ifPresent(argList -> {
        retrySpec.pos = getPosition(argList);
        for (Node argNode : argList.arguments()) {
            retrySpec.argExprs.add(createExpression(argNode));
        }
    });
    if (retrySpec.pos == null) {
        retrySpec.pos = getPosition(retryStatementNode);
    }
    return retrySpec;
}
@Override
public BLangNode transform(TransactionalExpressionNode transactionalExpressionNode) {
    // The `transactional` expression carries no operands — only its position.
    BLangTransactionalExpr transactionalExpr = TreeBuilder.createTransactionalExpressionNode();
    transactionalExpr.pos = getPosition(transactionalExpressionNode);
    return transactionalExpr;
}
@Override
public BLangNode transform(XMLFilterExpressionNode xmlFilterExpressionNode) {
    // `x.<name-pattern>` element filter: one element filter per name pattern.
    List<BLangXMLElementFilter> filters = new ArrayList<>();
    for (Node patternNode : xmlFilterExpressionNode.xmlPatternChain().xmlNamePattern()) {
        filters.add(createXMLElementFilter(patternNode));
    }
    BLangExpression filteredExpr = createExpression(xmlFilterExpressionNode.expression());
    return new BLangXMLElementAccess(getPosition(xmlFilterExpressionNode), null, filteredExpr, filters);
}
@Override
public BLangNode transform(XMLStepExpressionNode xmlStepExpressionNode) {
    // starCount feeds NavAccessType.fromInt: it distinguishes the navigation
    // forms based on the step-start token kind.
    List<BLangXMLElementFilter> filters = new ArrayList<>();
    int starCount = 0;
    SyntaxKind stepStartKind = xmlStepExpressionNode.xmlStepStart().kind();
    if (stepStartKind == SyntaxKind.SLASH_ASTERISK_TOKEN) {
        starCount = 1;
    } else if (stepStartKind == SyntaxKind.XML_NAME_PATTERN_CHAIN) {
        XMLNamePatternChainingNode patternChain =
                (XMLNamePatternChainingNode) xmlStepExpressionNode.xmlStepStart();
        for (Node namePattern : patternChain.xmlNamePattern()) {
            filters.add(createXMLElementFilter(namePattern));
        }
        SyntaxKind startTokenKind = patternChain.startToken().kind();
        if (startTokenKind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
            starCount = 2;
        } else if (startTokenKind == SyntaxKind.SLASH_ASTERISK_TOKEN) {
            starCount = 1;
        }
    }
    BLangExpression steppedExpr = createExpression(xmlStepExpressionNode.expression());
    return new BLangXMLNavigationAccess(getPosition(xmlStepExpressionNode), null, steppedExpr,
            filters, XMLNavigationAccess.NavAccessType.fromInt(starCount), null);
}
@Override
public BLangNode transform(MatchStatementNode matchStatementNode) {
    // Builds a match statement. The matched expression is shared: it is set on
    // the statement, on every clause, and on every pattern, so later phases can
    // refer back to it from any of those nodes.
    BLangMatchStatement matchStatement = (BLangMatchStatement) TreeBuilder.createMatchStatementNode();
    BLangExpression matchStmtExpr = createExpression(matchStatementNode.condition());
    matchStatement.setExpression(matchStmtExpr);
    for (MatchClauseNode matchClauseNode : matchStatementNode.matchClauses()) {
        BLangMatchClause bLangMatchClause = (BLangMatchClause) TreeBuilder.createMatchClause();
        bLangMatchClause.pos = getPosition(matchClauseNode);
        bLangMatchClause.expr = matchStmtExpr;
        boolean matchGuardAvailable = false;
        if (matchClauseNode.matchGuard().isPresent()) {
            matchGuardAvailable = true;
            BLangMatchGuard bLangMatchGuard = (BLangMatchGuard) TreeBuilder.createMatchGuard();
            bLangMatchGuard.expr = createExpression(matchClauseNode.matchGuard().get().expression());
            bLangMatchClause.setMatchGuard(bLangMatchGuard);
        }
        for (Node matchPattern : matchClauseNode.matchPatterns()) {
            BLangMatchPattern bLangMatchPattern = transformMatchPattern(matchPattern);
            // transformMatchPattern returns null for unsupported patterns (an
            // error is logged there), so such patterns are simply skipped here.
            if (bLangMatchPattern != null) {
                bLangMatchPattern.matchExpr = matchStmtExpr;
                bLangMatchPattern.matchGuardIsAvailable = matchGuardAvailable;
                bLangMatchClause.addMatchPattern(bLangMatchPattern);
            }
        }
        bLangMatchClause.setBlockStatement((BLangBlockStmt) transform(matchClauseNode.blockStatement()));
        matchStatement.addMatchClause(bLangMatchClause);
    }
    matchStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        matchStatement.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    return matchStatement;
}
public BLangNode createXmlTemplateLiteral(TemplateExpressionNode expressionNode) {
    // The translation strategy is decided by the kind of the first content
    // item; an empty template is treated as XML text.
    SyntaxKind contentKind;
    if (expressionNode.content().isEmpty()) {
        contentKind = SyntaxKind.XML_TEXT;
    } else {
        contentKind = expressionNode.content().get(0).kind();
    }
    if (contentKind == SyntaxKind.XML_COMMENT || contentKind == SyntaxKind.XML_PI
            || contentKind == SyntaxKind.XML_ELEMENT || contentKind == SyntaxKind.XML_EMPTY_ELEMENT) {
        // A single structured item translates directly as an expression.
        return createExpression(expressionNode.content().get(0));
    }
    return createXMLLiteral(expressionNode);
}
/**
 * Translates a syntax-tree match pattern into its BLang counterpart.
 * Returns {@code null} for unsupported or missing patterns after logging
 * {@code MATCH_PATTERN_NOT_SUPPORTED}; callers must null-check the result.
 */
private BLangMatchPattern transformMatchPattern(Node matchPattern) {
    Location matchPatternPos = matchPattern.location();
    SyntaxKind kind = matchPattern.kind();
    // A name reference whose identifier is missing cannot form a pattern.
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
            ((SimpleNameReferenceNode) matchPattern).name().isMissing()) {
        dlog.error(matchPatternPos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
        return null;
    }
    // `_` written as a simple name reference is the wildcard pattern.
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
            ((SimpleNameReferenceNode) matchPattern).name().text().equals("_")) {
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }
    // `_` written as a bare identifier token is also the wildcard pattern.
    if (kind == SyntaxKind.IDENTIFIER_TOKEN && ((IdentifierToken) matchPattern).text().equals("_")) {
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }
    // Literals and (non-wildcard) name references become constant patterns.
    if (kind == SyntaxKind.NUMERIC_LITERAL ||
            kind == SyntaxKind.STRING_LITERAL ||
            kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            kind == SyntaxKind.IDENTIFIER_TOKEN ||
            kind == SyntaxKind.NULL_LITERAL ||
            kind == SyntaxKind.NIL_LITERAL ||
            kind == SyntaxKind.BOOLEAN_LITERAL) {
        BLangConstPattern bLangConstMatchPattern =
                (BLangConstPattern) TreeBuilder.createConstMatchPattern();
        bLangConstMatchPattern.setExpression(createExpression(matchPattern));
        bLangConstMatchPattern.pos = matchPatternPos;
        return bLangConstMatchPattern;
    }
    // `var <binding-pattern>` — wraps a binding pattern into a match pattern.
    if (kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        TypedBindingPatternNode typedBindingPatternNode = (TypedBindingPatternNode) matchPattern;
        BLangVarBindingPatternMatchPattern bLangVarBindingPattern =
                (BLangVarBindingPatternMatchPattern) TreeBuilder.createVarBindingPattern();
        bLangVarBindingPattern.pos = matchPatternPos;
        bLangVarBindingPattern.setBindingPattern(transformBindingPattern(typedBindingPatternNode.bindingPattern()));
        return bLangVarBindingPattern;
    }
    // Structured patterns delegate to their dedicated transformers.
    if (kind == SyntaxKind.ERROR_MATCH_PATTERN) {
        return transformErrorMatchPattern((ErrorMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN) {
        return transformNamedArgMatchPattern((NamedArgMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.LIST_MATCH_PATTERN) {
        return transformListMatchPattern((ListMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.REST_MATCH_PATTERN) {
        return transformRestMatchPattern((RestMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.MAPPING_MATCH_PATTERN) {
        return transformMappingMatchPattern((MappingMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.FIELD_MATCH_PATTERN) {
        return transformFieldMatchPattern((FieldMatchPatternNode) matchPattern, matchPatternPos);
    }
    // Anything else is not a supported match pattern.
    dlog.error(matchPatternPos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
    return null;
}
/**
 * Translates an error match pattern, e.g. {@code error MyErr(msg, cause, code = c, ...rest)}.
 * Positional protocol: arg 0 is the message unless it is already a named-arg/rest
 * field; arg 1 is the cause under the same condition; all remaining args are
 * named-arg/rest field patterns.
 */
private BLangErrorMatchPattern transformErrorMatchPattern(ErrorMatchPatternNode errorMatchPatternNode,
                                                          Location pos) {
    BLangErrorMatchPattern bLangErrorMatchPattern =
            (BLangErrorMatchPattern) TreeBuilder.createErrorMatchPattern();
    bLangErrorMatchPattern.pos = pos;
    NameReferenceNode nameReferenceNode;
    // Optional error type reference, e.g. `error MyErr(...)`.
    if (errorMatchPatternNode.typeReference().isPresent()) {
        nameReferenceNode = errorMatchPatternNode.typeReference().get();
        bLangErrorMatchPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode);
    }
    if (errorMatchPatternNode.argListMatchPatternNode().size() == 0) {
        return bLangErrorMatchPattern;
    }
    Node node = errorMatchPatternNode.argListMatchPatternNode().get(0);
    // If the first arg is already a field pattern there is no message/cause.
    if (isErrorFieldMatchPattern(node)) {
        createErrorFieldMatchPatterns(0, errorMatchPatternNode, bLangErrorMatchPattern);
        return bLangErrorMatchPattern;
    }
    bLangErrorMatchPattern.errorMessageMatchPattern = createErrorMessageMatchPattern(node);
    if (errorMatchPatternNode.argListMatchPatternNode().size() == 1) {
        return bLangErrorMatchPattern;
    }
    node = errorMatchPatternNode.argListMatchPatternNode().get(1);
    // Second arg: either the start of the field patterns or the error cause.
    if (isErrorFieldMatchPattern(node)) {
        createErrorFieldMatchPatterns(1, errorMatchPatternNode, bLangErrorMatchPattern);
        return bLangErrorMatchPattern;
    }
    bLangErrorMatchPattern.errorCauseMatchPattern = createErrorCauseMatchPattern(node);
    // Everything from index 2 onward is a field pattern.
    createErrorFieldMatchPatterns(2, errorMatchPatternNode, bLangErrorMatchPattern);
    return bLangErrorMatchPattern;
}
private BLangNamedArgMatchPattern transformNamedArgMatchPattern(NamedArgMatchPatternNode namedArgMatchPatternNode,
                                                                Location pos) {
    // A `name = pattern` argument inside an error match pattern.
    BLangNamedArgMatchPattern namedArgPattern =
            (BLangNamedArgMatchPattern) TreeBuilder.createNamedArgMatchPattern();
    namedArgPattern.pos = pos;
    namedArgPattern.argName = createIdentifier(namedArgMatchPatternNode.identifier());
    namedArgPattern.matchPattern = transformMatchPattern(namedArgMatchPatternNode.matchPattern());
    return namedArgPattern;
}
/**
 * Translates a list match pattern, e.g. {@code [1, var x, ...rest]}.
 * Members are translated in order; a trailing rest pattern is stored
 * separately on the result.
 */
private BLangListMatchPattern transformListMatchPattern(ListMatchPatternNode listMatchPatternNode,
                                                        Location pos) {
    BLangListMatchPattern bLangListMatchPattern =
            (BLangListMatchPattern) TreeBuilder.createListMatchPattern();
    bLangListMatchPattern.pos = pos;
    SeparatedNodeList<Node> matchPatterns = listMatchPatternNode.matchPatterns();
    int matchPatternListSize = matchPatterns.size();
    if (matchPatternListSize == 0) {
        return bLangListMatchPattern;
    }
    for (int i = 0; i < matchPatternListSize - 1; i++) {
        BLangMatchPattern bLangMemberMatchPattern = transformMatchPattern(matchPatterns.get(i));
        if (bLangMemberMatchPattern == null) {
            // Unsupported member pattern: an error has already been logged.
            continue;
        }
        bLangListMatchPattern.addMatchPattern(bLangMemberMatchPattern);
    }
    BLangMatchPattern lastMember = transformMatchPattern(matchPatterns.get(matchPatternListSize - 1));
    if (lastMember == null) {
        // Bug fix: previously an unsupported last member caused an NPE on
        // lastMember.getKind(); skip it like the other members instead.
        return bLangListMatchPattern;
    }
    // A rest pattern is only meaningful in the last position.
    if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
        bLangListMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
    } else {
        bLangListMatchPattern.addMatchPattern(lastMember);
    }
    return bLangListMatchPattern;
}
private BLangRestMatchPattern transformRestMatchPattern(RestMatchPatternNode restMatchPatternNode, Location pos) {
    // `...name` — captures the remaining members under `name`.
    BLangRestMatchPattern restPattern = (BLangRestMatchPattern) TreeBuilder.createRestMatchPattern();
    restPattern.pos = pos;
    SimpleNameReferenceNode nameRef = restMatchPatternNode.variableName();
    restPattern.setIdentifier(createIdentifier(getPosition(nameRef), nameRef.name()));
    return restPattern;
}
/**
 * Translates a mapping match pattern, e.g. {@code {a: 1, b: var x, ...rest}}.
 * Field patterns are translated in order; a trailing rest pattern is stored
 * separately on the result.
 */
private BLangMappingMatchPattern transformMappingMatchPattern(MappingMatchPatternNode mappingMatchPatternNode,
                                                              Location pos) {
    BLangMappingMatchPattern bLangMappingMatchPattern =
            (BLangMappingMatchPattern) TreeBuilder.createMappingMatchPattern();
    bLangMappingMatchPattern.pos = pos;
    SeparatedNodeList<Node> fieldMatchPatterns = mappingMatchPatternNode.fieldMatchPatterns();
    int fieldMatchPatternListSize = fieldMatchPatterns.size();
    if (fieldMatchPatternListSize == 0) {
        return bLangMappingMatchPattern;
    }
    for (int i = 0; i < fieldMatchPatternListSize - 1; i++) {
        BLangMatchPattern fieldMatchPattern = transformMatchPattern(fieldMatchPatterns.get(i));
        if (fieldMatchPattern == null) {
            // Bug fix: previously a null (unsupported) pattern was added to
            // the field list unchecked; skip it, the error is already logged.
            continue;
        }
        bLangMappingMatchPattern.fieldMatchPatterns.add((BLangFieldMatchPattern) fieldMatchPattern);
    }
    BLangMatchPattern lastMember = transformMatchPattern(fieldMatchPatterns.get(fieldMatchPatternListSize - 1));
    if (lastMember == null) {
        // Bug fix: previously an unsupported last member caused an NPE on
        // lastMember.getKind().
        return bLangMappingMatchPattern;
    }
    // A rest pattern is only meaningful in the last position.
    if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
        bLangMappingMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
    } else {
        bLangMappingMatchPattern.addFieldMatchPattern((BLangFieldMatchPattern) lastMember);
    }
    return bLangMappingMatchPattern;
}
private BLangFieldMatchPattern transformFieldMatchPattern(FieldMatchPatternNode fieldMatchPatternNode,
                                                          Location pos) {
    // `fieldName: pattern` inside a mapping match pattern.
    BLangFieldMatchPattern fieldPattern =
            (BLangFieldMatchPattern) TreeBuilder.createFieldMatchPattern();
    fieldPattern.pos = pos;
    fieldPattern.fieldName = createIdentifier(fieldMatchPatternNode.fieldNameNode());
    fieldPattern.matchPattern = transformMatchPattern(fieldMatchPatternNode.matchPattern());
    return fieldPattern;
}
/**
 * Dispatches a syntax-tree binding pattern to its specific transformer.
 * Returns {@code null} for unsupported pattern kinds after logging an error,
 * so callers must null-check the result.
 */
private BLangBindingPattern transformBindingPattern(Node bindingPattern) {
    Location pos = getPosition(bindingPattern);
    SyntaxKind patternKind = bindingPattern.kind();
    switch (patternKind) {
        case WILDCARD_BINDING_PATTERN:
            return transformWildCardBindingPattern(pos);
        case CAPTURE_BINDING_PATTERN:
            return transformCaptureBindingPattern((CaptureBindingPatternNode) bindingPattern, pos);
        case LIST_BINDING_PATTERN:
            return transformListBindingPattern((ListBindingPatternNode) bindingPattern, pos);
        case NAMED_ARG_BINDING_PATTERN:
            return transformNamedArgBindingPattern((NamedArgBindingPatternNode) bindingPattern, pos);
        case REST_BINDING_PATTERN:
            return transformRestBindingPattern((RestBindingPatternNode) bindingPattern, pos);
        case MAPPING_BINDING_PATTERN:
            return transformMappingBindingPattern((MappingBindingPatternNode) bindingPattern, pos);
        case FIELD_BINDING_PATTERN:
            // Both the full and varname field-binding forms reach here.
            return transformFieldBindingPattern(bindingPattern, pos);
        case ERROR_BINDING_PATTERN:
            return transformErrorBindingPattern((ErrorBindingPatternNode) bindingPattern, pos);
        default:
            dlog.error(pos, DiagnosticErrorCode.MATCH_PATTERN_NOT_SUPPORTED);
            return null;
    }
}
private BLangWildCardBindingPattern transformWildCardBindingPattern(Location pos) {
    // `_`: matches anything and binds nothing.
    BLangWildCardBindingPattern wildCard =
            (BLangWildCardBindingPattern) TreeBuilder.createWildCardBindingPattern();
    wildCard.pos = pos;
    return wildCard;
}
private BLangCaptureBindingPattern transformCaptureBindingPattern(CaptureBindingPatternNode captureBindingPattern,
                                                                  Location pos) {
    // A capture pattern binds the matched value to a fresh variable.
    BLangCaptureBindingPattern capture =
            (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
    capture.pos = pos;
    capture.setIdentifier(createIdentifier(captureBindingPattern.variableName()));
    return capture;
}
private BLangRestBindingPattern transformRestBindingPattern(RestBindingPatternNode restBindingPatternNode,
                                                            Location pos) {
    // `...name` — binds the remaining members to `name`.
    BLangRestBindingPattern restPattern =
            (BLangRestBindingPattern) TreeBuilder.createRestBindingPattern();
    restPattern.pos = pos;
    SimpleNameReferenceNode nameRef = restBindingPatternNode.variableName();
    restPattern.setIdentifier(createIdentifier(getPosition(nameRef), nameRef.name()));
    return restPattern;
}
private BLangListBindingPattern transformListBindingPattern(ListBindingPatternNode listBindingPatternNode,
                                                            Location pos) {
    // Members become child binding patterns; a rest pattern (if any) is
    // stored separately on the list pattern.
    BLangListBindingPattern listPattern =
            (BLangListBindingPattern) TreeBuilder.createListBindingPattern();
    listPattern.pos = pos;
    for (Node member : listBindingPatternNode.bindingPatterns()) {
        if (member.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            listPattern.restBindingPattern =
                    (BLangRestBindingPattern) transformBindingPattern(member);
        } else {
            listPattern.addBindingPattern(transformBindingPattern(member));
        }
    }
    return listPattern;
}
private BLangMappingBindingPattern transformMappingBindingPattern(MappingBindingPatternNode
                                                                          mappingBindingPatternNode,
                                                                  Location pos) {
    // Field binding patterns are collected in order; a rest pattern (if any)
    // is stored separately on the mapping pattern.
    BLangMappingBindingPattern mappingPattern =
            (BLangMappingBindingPattern) TreeBuilder.createMappingBindingPattern();
    mappingPattern.pos = pos;
    for (Node field : mappingBindingPatternNode.fieldBindingPatterns()) {
        if (field.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            mappingPattern.restBindingPattern =
                    (BLangRestBindingPattern) transformBindingPattern(field);
        } else {
            mappingPattern.fieldBindingPatterns.add(
                    (BLangFieldBindingPattern) transformBindingPattern(field));
        }
    }
    return mappingPattern;
}
private BLangFieldBindingPattern transformFieldBindingPattern(Node bindingPattern, Location pos) {
    // Handles both `{name}` (varname shorthand) and `{name: pattern}` forms.
    BLangFieldBindingPattern fieldPattern =
            (BLangFieldBindingPattern) TreeBuilder.createFieldBindingPattern();
    fieldPattern.pos = pos;
    if (bindingPattern instanceof FieldBindingPatternVarnameNode) {
        // `{name}` is sugar for `{name: name}`: synthesize a capture pattern
        // whose identifier is the field name itself.
        FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) bindingPattern;
        BLangIdentifier fieldName = createIdentifier(varnameNode.variableName().name());
        fieldPattern.fieldName = fieldName;
        BLangCaptureBindingPattern capture =
                (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
        capture.setIdentifier(fieldName);
        capture.pos = pos;
        fieldPattern.bindingPattern = capture;
        return fieldPattern;
    }
    FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) bindingPattern;
    fieldPattern.fieldName = createIdentifier(fullNode.variableName().name());
    fieldPattern.bindingPattern = transformBindingPattern(fullNode.bindingPattern());
    return fieldPattern;
}
private BLangNamedArgBindingPattern transformNamedArgBindingPattern(NamedArgBindingPatternNode
                                                                            namedArgBindingPattern,
                                                                    Location pos) {
    // A `name = pattern` argument inside an error binding pattern.
    BLangNamedArgBindingPattern namedArgPattern =
            (BLangNamedArgBindingPattern) TreeBuilder.createNamedArgBindingPattern();
    namedArgPattern.pos = pos;
    namedArgPattern.argName = createIdentifier(namedArgBindingPattern.argName());
    namedArgPattern.bindingPattern = transformBindingPattern(namedArgBindingPattern.bindingPattern());
    return namedArgPattern;
}
/**
 * Translates an error binding pattern, e.g. {@code error MyErr(msg, cause, code = c, ...rest)}.
 * Positional protocol mirrors {@link #transformErrorMatchPattern}: arg 0 is the
 * message unless it is already a named-arg/rest field; arg 1 is the cause under
 * the same condition; remaining args are named-arg/rest field patterns.
 */
private BLangErrorBindingPattern transformErrorBindingPattern(ErrorBindingPatternNode errorBindingPatternNode,
                                                              Location pos) {
    BLangErrorBindingPattern bLangErrorBindingPattern =
            (BLangErrorBindingPattern) TreeBuilder.createErrorBindingPattern();
    bLangErrorBindingPattern.pos = pos;
    // Optional error type reference, e.g. `error MyErr(...)`.
    if (errorBindingPatternNode.typeReference().isPresent()) {
        Node nameReferenceNode = errorBindingPatternNode.typeReference().get();
        bLangErrorBindingPattern.errorTypeReference =
                (BLangUserDefinedType) createTypeNode(nameReferenceNode);
    }
    if (errorBindingPatternNode.argListBindingPatterns().size() == 0) {
        return bLangErrorBindingPattern;
    }
    Node node = errorBindingPatternNode.argListBindingPatterns().get(0);
    // If the first arg is already a field pattern there is no message/cause.
    if (isErrorFieldBindingPattern(node)) {
        createErrorFieldBindingPatterns(0, errorBindingPatternNode, bLangErrorBindingPattern);
        return bLangErrorBindingPattern;
    }
    bLangErrorBindingPattern.errorMessageBindingPattern = createErrorMessageBindingPattern(node);
    if (errorBindingPatternNode.argListBindingPatterns().size() == 1) {
        return bLangErrorBindingPattern;
    }
    node = errorBindingPatternNode.argListBindingPatterns().get(1);
    // Second arg: either the start of the field patterns or the error cause.
    if (isErrorFieldBindingPattern(node)) {
        createErrorFieldBindingPatterns(1, errorBindingPatternNode, bLangErrorBindingPattern);
        return bLangErrorBindingPattern;
    }
    bLangErrorBindingPattern.errorCauseBindingPattern = createErrorCauseBindingPattern(node);
    // Everything from index 2 onward is a field pattern.
    createErrorFieldBindingPatterns(2, errorBindingPatternNode, bLangErrorBindingPattern);
    return bLangErrorBindingPattern;
}
private boolean isErrorFieldMatchPattern(Node node) {
    // Error "field" args are named-arg patterns or the rest pattern.
    SyntaxKind kind = node.kind();
    return kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN || kind == SyntaxKind.REST_MATCH_PATTERN;
}
private boolean isErrorFieldBindingPattern(Node node) {
    // Error "field" args are named-arg patterns or the rest pattern.
    SyntaxKind kind = node.kind();
    return kind == SyntaxKind.NAMED_ARG_BINDING_PATTERN || kind == SyntaxKind.REST_BINDING_PATTERN;
}
private BLangErrorMessageMatchPattern createErrorMessageMatchPattern(Node node) {
    // The first positional error arg is the message; wrap it as a simple pattern.
    BLangErrorMessageMatchPattern messagePattern =
            (BLangErrorMessageMatchPattern) TreeBuilder.createErrorMessageMatchPattern();
    messagePattern.pos = getPosition(node);
    messagePattern.simpleMatchPattern = createSimpleMatchPattern(transformMatchPattern(node));
    return messagePattern;
}
private BLangErrorMessageBindingPattern createErrorMessageBindingPattern(Node node) {
    // The first positional error arg is the message; wrap it as a simple pattern.
    BLangErrorMessageBindingPattern messagePattern =
            (BLangErrorMessageBindingPattern) TreeBuilder.createErrorMessageBindingPattern();
    messagePattern.pos = getPosition(node);
    messagePattern.simpleBindingPattern = createSimpleBindingPattern(transformBindingPattern(node));
    return messagePattern;
}
private BLangErrorCauseMatchPattern createErrorCauseMatchPattern(Node node) {
    // The second positional error arg is the cause; it may itself be an
    // error pattern, otherwise it is wrapped as a simple pattern.
    BLangErrorCauseMatchPattern causePattern =
            (BLangErrorCauseMatchPattern) TreeBuilder.createErrorCauseMatchPattern();
    causePattern.pos = getPosition(node);
    BLangMatchPattern matchPattern = transformMatchPattern(node);
    if (matchPattern.getKind() == NodeKind.ERROR_MATCH_PATTERN) {
        causePattern.errorMatchPattern = (BLangErrorMatchPattern) matchPattern;
    } else {
        causePattern.simpleMatchPattern = createSimpleMatchPattern(matchPattern);
    }
    return causePattern;
}
private BLangErrorCauseBindingPattern createErrorCauseBindingPattern(Node node) {
    // The second positional error arg is the cause; it may itself be an
    // error pattern, otherwise it is wrapped as a simple pattern.
    BLangErrorCauseBindingPattern causePattern =
            (BLangErrorCauseBindingPattern) TreeBuilder.createErrorCauseBindingPattern();
    causePattern.pos = getPosition(node);
    BLangBindingPattern bindingPattern = transformBindingPattern(node);
    if (bindingPattern.getKind() == NodeKind.ERROR_BINDING_PATTERN) {
        causePattern.errorBindingPattern = (BLangErrorBindingPattern) bindingPattern;
    } else {
        causePattern.simpleBindingPattern = createSimpleBindingPattern(bindingPattern);
    }
    return causePattern;
}
/**
 * Adds one error-field arg (named-arg or rest pattern) into the shared
 * accumulator container and returns it; the caller reassigns the returned
 * container onto the error match pattern on every iteration.
 */
private BLangErrorFieldMatchPatterns createErrorFieldMatchPattern(Node errorFieldMatchPatternNode,
                                                                  BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns) {
    BLangMatchPattern matchPattern = transformMatchPattern(errorFieldMatchPatternNode);
    // Position ends up being that of the last field processed.
    bLangErrorFieldMatchPatterns.pos = getPosition(errorFieldMatchPatternNode);
    if (matchPattern.getKind() == NodeKind.NAMED_ARG_MATCH_PATTERN) {
        bLangErrorFieldMatchPatterns.addNamedArgMatchPattern(
                (org.ballerinalang.model.tree.matchpatterns.NamedArgMatchPatternNode) matchPattern);
    } else if (matchPattern.getKind() == NodeKind.REST_MATCH_PATTERN) {
        bLangErrorFieldMatchPatterns.restMatchPattern = (BLangRestMatchPattern) matchPattern;
    }
    return bLangErrorFieldMatchPatterns;
}
/**
 * Adds one error-field arg (named-arg or rest binding pattern) into the shared
 * accumulator container and returns it; the caller reassigns the returned
 * container onto the error binding pattern on every iteration.
 */
private BLangErrorFieldBindingPatterns createErrorFieldBindingPattern(Node errorFieldBindingPatternNode,
                                                                      BLangErrorFieldBindingPatterns
                                                                              bLangErrorFieldBindingPatterns) {
    BLangBindingPattern bindingPattern = transformBindingPattern(errorFieldBindingPatternNode);
    // Position ends up being that of the last field processed.
    bLangErrorFieldBindingPatterns.pos = getPosition(errorFieldBindingPatternNode);
    if (bindingPattern.getKind() == NodeKind.NAMED_ARG_BINDING_PATTERN) {
        bLangErrorFieldBindingPatterns.
                addNamedArgBindingPattern(
                        (org.ballerinalang.model.tree.bindingpattern.NamedArgBindingPatternNode) bindingPattern);
    } else if (bindingPattern.getKind() == NodeKind.REST_BINDING_PATTERN) {
        bLangErrorFieldBindingPatterns.restBindingPattern = (BLangRestBindingPattern) bindingPattern;
    }
    return bLangErrorFieldBindingPatterns;
}
private void createErrorFieldMatchPatterns(int index, ErrorMatchPatternNode errorMatchPatternNode,
                                           BLangErrorMatchPattern bLangErrorMatchPattern) {
    // Accumulate every remaining arg (from `index` onward) into a single
    // error-field container attached to the error match pattern.
    BLangErrorFieldMatchPatterns fieldPatterns =
            (BLangErrorFieldMatchPatterns) TreeBuilder.createErrorFieldMatchPattern();
    SeparatedNodeList<Node> args = errorMatchPatternNode.argListMatchPatternNode();
    for (int i = index; i < args.size(); i++) {
        bLangErrorMatchPattern.errorFieldMatchPatterns =
                createErrorFieldMatchPattern(args.get(i), fieldPatterns);
    }
}
private void createErrorFieldBindingPatterns(int index, ErrorBindingPatternNode errorBindingPatternNode,
                                             BLangErrorBindingPattern bLangErrorBindingPattern) {
    // Accumulate every remaining arg (from `index` onward) into a single
    // error-field container attached to the error binding pattern.
    BLangErrorFieldBindingPatterns fieldPatterns =
            (BLangErrorFieldBindingPatterns) TreeBuilder.createErrorFieldBindingPattern();
    SeparatedNodeList<BindingPatternNode> args = errorBindingPatternNode.argListBindingPatterns();
    for (int i = index; i < args.size(); i++) {
        bLangErrorBindingPattern.errorFieldBindingPatterns =
                createErrorFieldBindingPattern(args.get(i), fieldPatterns);
    }
}
private BLangSimpleMatchPattern createSimpleMatchPattern(BLangNode bLangNode) {
    // Wraps a wildcard, const, or var-binding pattern into a simple match
    // pattern; other kinds leave the wrapper empty.
    BLangSimpleMatchPattern simplePattern =
            (BLangSimpleMatchPattern) TreeBuilder.createSimpleMatchPattern();
    NodeKind kind = bLangNode.getKind();
    if (kind == NodeKind.WILDCARD_MATCH_PATTERN) {
        simplePattern.wildCardMatchPattern = (BLangWildCardMatchPattern) bLangNode;
    } else if (kind == NodeKind.CONST_MATCH_PATTERN) {
        simplePattern.constPattern = (BLangConstPattern) bLangNode;
    } else if (kind == NodeKind.VAR_BINDING_PATTERN_MATCH_PATTERN) {
        simplePattern.varVariableName = (BLangVarBindingPatternMatchPattern) bLangNode;
    }
    return simplePattern;
}
private BLangCaptureBindingPattern createCaptureBindingPattern(CaptureBindingPatternNode
                                                                       captureBindingPatternNode) {
    // Capture pattern built directly from the syntax node's variable name.
    BLangCaptureBindingPattern capture =
            (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
    capture.pos = getPosition(captureBindingPatternNode);
    capture.setIdentifier(createIdentifier(captureBindingPatternNode.variableName()));
    return capture;
}
private BLangSimpleBindingPattern createSimpleBindingPattern(BLangNode bLangNode) {
    // Wraps a wildcard or capture binding pattern into a simple binding
    // pattern; other kinds leave the wrapper empty.
    BLangSimpleBindingPattern simplePattern =
            (BLangSimpleBindingPattern) TreeBuilder.createSimpleBindingPattern();
    NodeKind kind = bLangNode.getKind();
    if (kind == NodeKind.WILDCARD_BINDING_PATTERN) {
        simplePattern.wildCardBindingPattern = (BLangWildCardBindingPattern) bLangNode;
    } else if (kind == NodeKind.CAPTURE_BINDING_PATTERN) {
        simplePattern.captureBindingPattern = (BLangCaptureBindingPattern) bLangNode;
    }
    return simplePattern;
}
/**
 * Builds an XML element filter from a name-pattern node.
 * Defaults: element name {@code "*"} (match any) and empty namespace; the
 * cases below override them depending on the pattern form. Leading single
 * quotes (quoted identifiers) are stripped from both parts.
 */
private BLangXMLElementFilter createXMLElementFilter(Node node) {
    String ns = "";
    String elementName = "*";
    Location nsPos = null;
    Location elemNamePos = null;
    SyntaxKind kind = node.kind();
    switch (kind) {
        case SIMPLE_NAME_REFERENCE:
            // Unqualified name: element name only, no namespace.
            SimpleNameReferenceNode simpleNameReferenceNode = (SimpleNameReferenceNode) node;
            elementName = simpleNameReferenceNode.name().text();
            elemNamePos = getPosition(simpleNameReferenceNode);
            break;
        case QUALIFIED_NAME_REFERENCE:
            // `prefix:name` form: both namespace prefix and element name.
            QualifiedNameReferenceNode qualifiedNameReferenceNode = (QualifiedNameReferenceNode) node;
            elementName = qualifiedNameReferenceNode.identifier().text();
            elemNamePos = getPosition(qualifiedNameReferenceNode.identifier());
            ns = qualifiedNameReferenceNode.modulePrefix().text();
            nsPos = getPosition(qualifiedNameReferenceNode.modulePrefix());
            break;
        case XML_ATOMIC_NAME_PATTERN:
            XMLAtomicNamePatternNode atomicNamePatternNode = (XMLAtomicNamePatternNode) node;
            elementName = atomicNamePatternNode.name().text();
            elemNamePos = getPosition(atomicNamePatternNode.name());
            ns = atomicNamePatternNode.prefix().text();
            nsPos = getPosition(atomicNamePatternNode.prefix());
            break;
        case ASTERISK_TOKEN:
            // `*`: keep the default "*" element name, record position only.
            elemNamePos = getPosition(node);
    }
    // Quoted identifiers carry a leading single quote that must be dropped.
    if (stringStartsWithSingleQuote(ns)) {
        ns = ns.substring(1);
    }
    if (stringStartsWithSingleQuote(elementName)) {
        elementName = elementName.substring(1);
    }
    return new BLangXMLElementFilter(getPosition(node), null, ns, nsPos, elementName, elemNamePos);
}
private boolean stringStartsWithSingleQuote(String ns) {
    // Quoted identifiers arrive with a leading ' that callers strip off.
    return ns != null && ns.startsWith("'");
}
private String getValueFromByteArrayNode(ByteArrayLiteralNode byteArrayLiteralNode) {
    // Reassemble the literal source text: the base type keyword, a space, and
    // the backtick-delimited content (which may be absent).
    StringBuilder literal = new StringBuilder(byteArrayLiteralNode.type().text());
    literal.append(" ").append("`");
    byteArrayLiteralNode.content().ifPresent(content -> literal.append(content.text()));
    literal.append("`");
    return literal.toString();
}
/**
 * Builds a record (mapping) destructuring variable from a mapping binding
 * pattern. Full fields become key/value entries; varname shorthand fields
 * become a simple variable named after the field; any other node is treated
 * as the rest parameter and terminates the loop.
 */
private BLangRecordVariable createBLangRecordVariable(MappingBindingPatternNode mappingBindingPatternNode) {
    BLangRecordVariable recordVariable = (BLangRecordVariable) TreeBuilder.createRecordVariableNode();
    List<BLangRecordVariableKeyValue> fieldBindingPatternsList = new ArrayList<>();
    for (BindingPatternNode node : mappingBindingPatternNode.fieldBindingPatterns()) {
        BLangRecordVariableKeyValue recordKeyValue = new BLangRecordVariableKeyValue();
        if (node instanceof FieldBindingPatternFullNode) {
            // `{name: pattern}` — key from the field name, value recursed.
            FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) node;
            recordKeyValue.key = createIdentifier(fullNode.variableName().name());
            recordKeyValue.valueBindingPattern = getBLangVariableNode(fullNode.bindingPattern());
        } else if (node instanceof FieldBindingPatternVarnameNode) {
            // `{name}` shorthand — synthesize a simple variable named `name`.
            FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) node;
            recordKeyValue.key = createIdentifier(varnameNode.variableName().name());
            BLangSimpleVariable value = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
            value.pos = getPosition(varnameNode);
            IdentifierNode name = createIdentifier(varnameNode.variableName().name());
            ((BLangIdentifier) name).pos = value.pos;
            value.setName(name);
            recordKeyValue.valueBindingPattern = value;
        } else {
            // Rest binding pattern: store it and stop — it must be last.
            recordVariable.restParam = getBLangVariableNode(node);
            break;
        }
        fieldBindingPatternsList.add(recordKeyValue);
    }
    recordVariable.variableList = fieldBindingPatternsList;
    recordVariable.pos = getPosition(mappingBindingPatternNode);
    return recordVariable;
}
private BLangLiteral createEmptyLiteral() {
    // An empty string literal typed as `string`.
    BLangLiteral emptyLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    emptyLiteral.value = "";
    emptyLiteral.originalValue = "";
    emptyLiteral.type = symTable.getTypeFromTag(TypeTags.STRING);
    return emptyLiteral;
}
private BLangVariable createSimpleVariable(Location location,
                                           Token identifier,
                                           Location identifierPos) {
    // A simple variable whose name comes from the given identifier token.
    BLangSimpleVariable variable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    variable.pos = location;
    IdentifierNode name = createIdentifier(identifierPos, identifier);
    ((BLangIdentifier) name).pos = identifierPos;
    variable.setName(name);
    return variable;
}
/**
 * Converts a binding pattern into the corresponding BLang variable node:
 * mapping -> record variable, list -> tuple variable, error -> error variable;
 * rest, wildcard, and capture patterns all reduce to a simple variable built
 * from the pattern's token.
 */
private BLangVariable getBLangVariableNode(BindingPatternNode bindingPattern) {
    Token varName;
    switch (bindingPattern.kind()) {
        case MAPPING_BINDING_PATTERN:
            MappingBindingPatternNode mappingBindingPatternNode = (MappingBindingPatternNode) bindingPattern;
            return createBLangRecordVariable(mappingBindingPatternNode);
        case LIST_BINDING_PATTERN:
            ListBindingPatternNode listBindingPatternNode = (ListBindingPatternNode) bindingPattern;
            BLangTupleVariable tupleVariable = (BLangTupleVariable) TreeBuilder.createTupleVariableNode();
            tupleVariable.pos = getPosition(listBindingPatternNode);
            for (BindingPatternNode memberBindingPattern : listBindingPatternNode.bindingPatterns()) {
                // The rest pattern is stored separately from ordinary members.
                if (memberBindingPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
                    tupleVariable.restVariable = getBLangVariableNode(memberBindingPattern);
                } else {
                    BLangVariable member = getBLangVariableNode(memberBindingPattern);
                    tupleVariable.memberVariables.add(member);
                }
            }
            return tupleVariable;
        case ERROR_BINDING_PATTERN:
            ErrorBindingPatternNode errorBindingPatternNode = (ErrorBindingPatternNode) bindingPattern;
            BLangErrorVariable bLangErrorVariable = (BLangErrorVariable) TreeBuilder.createErrorVariableNode();
            bLangErrorVariable.pos = getPosition(errorBindingPatternNode);
            Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
            if (errorTypeRef.isPresent()) {
                bLangErrorVariable.typeNode = createTypeNode(errorTypeRef.get());
            }
            SeparatedNodeList<BindingPatternNode> argListBindingPatterns =
                    errorBindingPatternNode.argListBindingPatterns();
            int numberOfArgs = argListBindingPatterns.size();
            List<BLangErrorVariable.BLangErrorDetailEntry> namedArgs = new ArrayList<>();
            for (int position = 0; position < numberOfArgs; position++) {
                BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
                switch (bindingPatternNode.kind()) {
                    case CAPTURE_BINDING_PATTERN:
                    case WILDCARD_BINDING_PATTERN:
                        // A capture/wildcard at position 0 is the error message.
                        if (position == 0) {
                            bLangErrorVariable.message =
                                    (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                            break;
                        }
                        // Deliberate fall-through: at any other position it is
                        // treated as the error cause, same as an error pattern.
                    case ERROR_BINDING_PATTERN:
                        bLangErrorVariable.cause = getBLangVariableNode(bindingPatternNode);
                        break;
                    case NAMED_ARG_BINDING_PATTERN:
                        // `name = pattern` becomes a detail entry.
                        NamedArgBindingPatternNode namedArgBindingPatternNode =
                                (NamedArgBindingPatternNode) bindingPatternNode;
                        BLangIdentifier key =
                                createIdentifier(namedArgBindingPatternNode.argName());
                        BLangVariable valueBindingPattern =
                                getBLangVariableNode(namedArgBindingPatternNode.bindingPattern());
                        BLangErrorVariable.BLangErrorDetailEntry detailEntry =
                                new BLangErrorVariable.BLangErrorDetailEntry(key, valueBindingPattern);
                        namedArgs.add(detailEntry);
                        break;
                    default:
                        // Rest binding pattern captures the remaining detail.
                        bLangErrorVariable.restDetail =
                                (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                }
            }
            bLangErrorVariable.detail = namedArgs;
            return bLangErrorVariable;
        case REST_BINDING_PATTERN:
            RestBindingPatternNode restBindingPatternNode = (RestBindingPatternNode) bindingPattern;
            varName = restBindingPatternNode.variableName().name();
            break;
        case WILDCARD_BINDING_PATTERN:
            WildcardBindingPatternNode wildcardBindingPatternNode = (WildcardBindingPatternNode) bindingPattern;
            varName = wildcardBindingPatternNode.underscoreToken();
            break;
        case CAPTURE_BINDING_PATTERN:
        default:
            CaptureBindingPatternNode captureBindingPatternNode = (CaptureBindingPatternNode) bindingPattern;
            varName = captureBindingPatternNode.variableName();
            break;
    }
    // Rest/wildcard/capture all reduce to a simple variable here.
    Location pos = getPosition(bindingPattern);
    return createSimpleVariable(pos, varName, getPosition(varName));
}
BLangValueType addValueType(Location pos, TypeKind typeKind) {
    // Builds a built-in value-type node (e.g. int, string) at `pos`.
    BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
    valueType.typeKind = typeKind;
    valueType.pos = pos;
    return valueType;
}
/**
 * Transforms the given statement nodes into BLang statements.
 *
 * @param statementNodes source statement nodes
 * @return a freshly created list of transformed statements
 */
private List<BLangStatement> generateBLangStatements(NodeList<StatementNode> statementNodes) {
    // Delegate to the accumulating variant with an empty target list.
    return generateAndAddBLangStatements(statementNodes, new ArrayList<>());
}
/**
 * Transforms each statement node and appends the result to {@code statements}.
 * Fork statements expand into several statements and are handled separately.
 *
 * @param statementNodes source statement nodes (null entries are skipped)
 * @param statements     target list, also returned for convenience
 */
private List<BLangStatement> generateAndAddBLangStatements(NodeList<StatementNode> statementNodes,
                                                           List<BLangStatement> statements) {
    for (StatementNode statement : statementNodes) {
        if (statement == null) {
            continue;
        }
        if (statement.kind() == SyntaxKind.FORK_STATEMENT) {
            generateForkStatements(statements, (ForkStatementNode) statement);
        } else {
            statements.add((BLangStatement) statement.apply(this));
        }
    }
    return statements;
}
/**
 * Joins version-number tokens with '.' separators, e.g. tokens 1, 2, 3
 * become the string "1.2.3".
 */
private String extractVersion(SeparatedNodeList<Token> versionNumbers) {
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < versionNumbers.size(); i++) {
        if (i > 0) {
            builder.append('.');
        }
        builder.append(versionNumbers.get(i).text());
    }
    return builder.toString();
}
/**
 * Desugars a fork statement: each named worker becomes a variable definition
 * holding a worker lambda, which is added both to the surrounding statement
 * list and registered on the fork-join node; the fork-join node itself is
 * appended last.
 */
private void generateForkStatements(List<BLangStatement> statements, ForkStatementNode forkStatementNode) {
    BLangForkJoin forkJoin = (BLangForkJoin) forkStatementNode.apply(this);
    // One anonymous key is shared by all workers of this fork.
    String nextAnonymousForkKey = anonymousModelHelper.getNextAnonymousForkKey(packageID);
    for (NamedWorkerDeclarationNode workerDeclarationNode : forkStatementNode.namedWorkerDeclarations()) {
        BLangSimpleVariableDef workerDef = (BLangSimpleVariableDef) workerDeclarationNode.apply(this);
        workerDef.isWorker = true;
        workerDef.isInFork = true;
        workerDef.var.flagSet.add(Flag.FORKED);

        // The worker variable's initializer is a lambda; mark the underlying
        // function as forked and tag it with the shared fork key.
        BLangFunction function = ((BLangLambdaFunction) workerDef.var.expr).function;
        function.addFlag(Flag.FORKED);
        function.anonForkName = nextAnonymousForkKey;

        statements.add(workerDef);
        // Drain auxiliary statements produced while transforming this worker.
        while (!this.additionalStatements.empty()) {
            statements.add(additionalStatements.pop());
        }
        forkJoin.addWorkers(workerDef);
    }
    statements.add(forkJoin);
}
/**
 * Wraps the given expression in a {@code check} expression node.
 */
private BLangCheckedExpr createCheckExpr(Location pos, BLangExpression expr) {
    BLangCheckedExpr checked = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
    checked.expr = expr;
    checked.pos = pos;
    return checked;
}
/**
 * Wraps the given expression in a {@code checkpanic} expression node.
 */
private BLangCheckPanickedExpr createCheckPanickedExpr(Location pos, BLangExpression expr) {
    BLangCheckPanickedExpr checkPanicked =
            (BLangCheckPanickedExpr) TreeBuilder.createCheckPanicExpressionNode();
    checkPanicked.expr = expr;
    checkPanicked.pos = pos;
    return checkPanicked;
}
/**
 * Copies parameters and the return type from a function-signature syntax node
 * onto the given BLang function. A missing return type defaults to nil.
 */
private void populateFuncSignature(BLangFunction bLFunction, FunctionSignatureNode funcSignature) {
    // Rest parameters are set via a dedicated slot; others are appended.
    for (ParameterNode child : funcSignature.parameters()) {
        SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
        if (child instanceof RestParameterNode) {
            bLFunction.setRestParameter(param);
        } else {
            bLFunction.addParameter(param);
        }
    }

    Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
    if (!retNode.isPresent()) {
        // No declared return type: synthesize a nil value type.
        BLangValueType nilType = (BLangValueType) TreeBuilder.createValueTypeNode();
        nilType.pos = symTable.builtinPos;
        nilType.typeKind = TypeKind.NIL;
        bLFunction.setReturnTypeNode(nilType);
        return;
    }
    ReturnTypeDescriptorNode returnType = retNode.get();
    bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
    bLFunction.returnTypeAnnAttachments = applyAll(returnType.annotations());
}
/**
 * Creates a unary-expression node applying {@code operatorKind} to {@code expr}.
 */
private BLangUnaryExpr createBLangUnaryExpr(Location location,
                                            OperatorKind operatorKind,
                                            BLangExpression expr) {
    BLangUnaryExpr unaryExpr = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
    unaryExpr.operator = operatorKind;
    unaryExpr.expr = expr;
    unaryExpr.pos = location;
    return unaryExpr;
}
/**
 * Transforms a syntax node into a BLang expression. Async send actions are
 * not valid as expressions: an error is logged and a simple name reference
 * over a missing identifier token is substituted so transformation can
 * continue.
 */
private BLangExpression createExpression(Node expression) {
    if (expression.kind() == SyntaxKind.ASYNC_SEND_ACTION) {
        dlog.error(getPosition(expression), DiagnosticErrorCode.ASYNC_SEND_NOT_YET_SUPPORTED_AS_EXPRESSION);
        Token missingIdentifier = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
        expression = NodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    }
    return (BLangExpression) createActionOrExpression(expression);
}
/**
 * Transforms a syntax node into the corresponding BLang action or expression
 * node, special-casing literals, name references, braced (group) expressions
 * and type references; anything else is dispatched through {@code apply}.
 */
private BLangNode createActionOrExpression(Node actionOrExpression) {
    if (isSimpleLiteral(actionOrExpression.kind())) {
        return createSimpleLiteral(actionOrExpression);
    } else if (actionOrExpression.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            actionOrExpression.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
            actionOrExpression.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
        // Name references become simple variable references carrying both a
        // package alias and a variable name.
        BLangNameReference nameReference = createBLangNameReference(actionOrExpression);
        BLangSimpleVarRef bLVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
        bLVarRef.pos = getPosition(actionOrExpression);
        bLVarRef.pkgAlias = this.createIdentifier((Location) nameReference.pkgAlias.getPosition(),
                nameReference.pkgAlias.getValue());
        bLVarRef.variableName = this.createIdentifier((Location) nameReference.name.getPosition(),
                nameReference.name.getValue());
        return bLVarRef;
    } else if (actionOrExpression.kind() == SyntaxKind.BRACED_EXPRESSION) {
        // Parenthesized expression: wrap the inner expression in a group node.
        BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        group.expression = (BLangExpression) actionOrExpression.apply(this);
        group.pos = getPosition(actionOrExpression);
        return group;
    } else if (isType(actionOrExpression.kind())) {
        // A type used in expression position becomes a typedesc access.
        BLangTypedescExpr typeAccessExpr = (BLangTypedescExpr) TreeBuilder.createTypeAccessNode();
        typeAccessExpr.pos = getPosition(actionOrExpression);
        typeAccessExpr.typeNode = createTypeNode(actionOrExpression);
        return typeAccessExpr;
    } else {
        return actionOrExpression.apply(this);
    }
}
/**
 * Builds a string-template literal from its member expressions. An empty
 * template is normalised to hold a single empty literal.
 */
private BLangNode createStringTemplateLiteral(NodeList<Node> memberNodes, Location location) {
    BLangStringTemplateLiteral template =
            (BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
    for (Node memberNode : memberNodes) {
        template.exprs.add((BLangExpression) memberNode.apply(this));
    }
    if (template.exprs.isEmpty()) {
        BLangLiteral empty = createEmptyLiteral();
        empty.pos = location;
        template.exprs.add(empty);
    }
    template.pos = location;
    return template;
}
/**
 * Builds a raw-template literal, maintaining the invariant that the
 * {@code strings} list always has one more entry than {@code insertions}:
 * empty string literals are inserted before a leading interpolation, between
 * adjacent interpolations, and after a trailing interpolation.
 */
private BLangRawTemplateLiteral createRawTemplateLiteral(NodeList<Node> members, Location location) {
    BLangRawTemplateLiteral literal = (BLangRawTemplateLiteral) TreeBuilder.createRawTemplateLiteralNode();
    literal.pos = location;

    boolean prevNodeWasInterpolation = false;
    Node firstMember = members.isEmpty() ? null : members.get(0);
    // A template starting with an interpolation needs a leading empty string.
    if (firstMember != null && firstMember.kind() == SyntaxKind.INTERPOLATION) {
        literal.strings.add(createStringLiteral("", getPosition(firstMember)));
    }

    for (Node member : members) {
        if (member.kind() == SyntaxKind.INTERPOLATION) {
            literal.insertions.add((BLangExpression) member.apply(this));
            // Adjacent interpolations get an empty string between them.
            if (prevNodeWasInterpolation) {
                literal.strings.add(createStringLiteral("", getPosition(member)));
            }
            prevNodeWasInterpolation = true;
        } else {
            literal.strings.add((BLangLiteral) member.apply(this));
            prevNodeWasInterpolation = false;
        }
    }

    // A template ending with an interpolation needs a trailing empty string.
    if (prevNodeWasInterpolation) {
        literal.strings.add(createStringLiteral("", getPosition(members.get(members.size() - 1))));
    }

    return literal;
}
/**
 * Creates a simple variable from an optional name token; an absent name is
 * passed through as {@code null}.
 */
private BLangSimpleVariable createSimpleVar(Optional<Token> name, Node type, NodeList<AnnotationNode> annotations) {
    Token nameToken = name.orElse(null);
    return createSimpleVar(nameToken, type, null, null, annotations);
}
/**
 * Convenience overload: creates a simple variable with no initializer and no
 * visibility qualifier.
 */
private BLangSimpleVariable createSimpleVar(Token name, Node type, NodeList<AnnotationNode> annotations) {
    return createSimpleVar(name, type, null, null, annotations);
}
/**
 * Core factory for simple variables: sets the name, the declared type (or
 * marks the variable as declared with {@code var}), visibility flags, the
 * initializer expression and annotation attachments, as provided.
 */
private BLangSimpleVariable createSimpleVar(Token name, Node typeName, Node initializer,
                                            Token visibilityQualifier, NodeList<AnnotationNode> annotations) {
    BLangSimpleVariable variable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    variable.setName(this.createIdentifier(name));
    variable.name.pos = getPosition(name);

    if (isDeclaredWithVar(typeName)) {
        variable.isDeclaredWithVar = true;
    } else {
        variable.setTypeNode(createTypeNode(typeName));
    }

    if (visibilityQualifier != null) {
        SyntaxKind qualifierKind = visibilityQualifier.kind();
        if (qualifierKind == SyntaxKind.PRIVATE_KEYWORD) {
            variable.flagSet.add(Flag.PRIVATE);
        } else if (qualifierKind == SyntaxKind.PUBLIC_KEYWORD) {
            variable.flagSet.add(Flag.PUBLIC);
        }
    }

    if (initializer != null) {
        variable.setInitialExpression(createExpression(initializer));
    }
    if (annotations != null) {
        variable.annAttachments = applyAll(annotations);
    }
    return variable;
}
/**
 * Returns whether a variable is declared with {@code var}: either no type
 * node is given at all, or the type descriptor is the {@code var} keyword.
 */
private boolean isDeclaredWithVar(Node typeNode) {
    // Idiom: return the condition directly instead of if/return true/false.
    return typeNode == null || typeNode.kind() == SyntaxKind.VAR_TYPE_DESC;
}
/**
 * Creates an identifier node from a token, using the token's own position.
 */
private BLangIdentifier createIdentifier(Token token) {
    return createIdentifier(getPosition(token), token);
}
/**
 * Creates an identifier node from a token at an explicit position. A null
 * token yields an empty identifier; a missing token gets a generated
 * placeholder name so later phases still have something to refer to.
 */
private BLangIdentifier createIdentifier(Location pos, Token token) {
    if (token == null) {
        return createIdentifier(pos, null, null);
    }
    String identifierName = token.isMissing()
            ? missingNodesHelper.getNextMissingNodeName(packageID)
            : token.text();
    return createIdentifier(pos, identifierName);
}
/**
 * Creates an identifier node with no whitespace information.
 */
private BLangIdentifier createIdentifier(Location pos, String value) {
    return createIdentifier(pos, value, null);
}
/**
 * Creates an identifier node from a raw value, unescaping unicode code
 * points and recognising quoted identifiers (those starting with the
 * identifier-literal prefix).
 */
private BLangIdentifier createIdentifier(Location pos, String value, Set<Whitespace> ws) {
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    if (value == null) {
        return identifier;
    }

    if (value.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
        // Quoted identifier: strip the prefix but keep the original text.
        identifier.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value.substring(1)));
        identifier.originalValue = value;
        identifier.setLiteral(true);
    } else {
        identifier.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value));
        identifier.setLiteral(false);
    }
    identifier.pos = pos;
    if (ws != null) {
        identifier.addWS(ws);
    }
    return identifier;
}
/**
 * Creates a string-literal node whose value is the empty string.
 */
private BLangLiteral createEmptyStringLiteral(Location pos) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = "";
    literal.originalValue = "";
    literal.type = symTable.stringType;
    literal.pos = pos;
    return literal;
}
/**
 * Creates a literal node for a non-finite-type context.
 */
private BLangLiteral createSimpleLiteral(Node literal) {
    return createSimpleLiteral(literal, false);
}
/**
 * Creates a literal node, folding a wrapping unary expression (e.g. -1)
 * into a single signed literal positioned over the whole expression.
 */
private BLangLiteral createSimpleLiteral(Node literal, boolean isFiniteType) {
    if (literal.kind() != SyntaxKind.UNARY_EXPRESSION) {
        return createSimpleLiteral(literal, SyntaxKind.NONE, isFiniteType);
    }
    UnaryExpressionNode unaryExpr = (UnaryExpressionNode) literal;
    BLangLiteral signedLiteral =
            createSimpleLiteral(unaryExpr.expression(), unaryExpr.unaryOperator().kind(), isFiniteType);
    signedLiteral.pos = getPosition(unaryExpr);
    return signedLiteral;
}
/**
 * Creates a literal AST node from a literal syntax node, optionally applying
 * a leading sign token and finite-type-specific normalisation. Classifies the
 * literal (int/byte/float/decimal/boolean/string/nil/byte-array) and sets the
 * node's type tag, value and original source text accordingly.
 */
private BLangLiteral createSimpleLiteral(Node literal, SyntaxKind sign, boolean isFiniteType) {
    BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    SyntaxKind type = literal.kind();
    int typeTag = -1;
    Object value = null;
    String originalValue = null;

    // Extract the raw source text of the literal token, if any.
    String textValue;
    if (literal instanceof BasicLiteralNode) {
        textValue = ((BasicLiteralNode) literal).literalToken().text();
    } else if (literal instanceof Token) {
        textValue = ((Token) literal).text();
    } else {
        textValue = "";
    }

    // Fold an explicit sign token into the literal's text.
    if (sign == SyntaxKind.PLUS_TOKEN) {
        textValue = "+" + textValue;
    } else if (sign == SyntaxKind.MINUS_TOKEN) {
        textValue = "-" + textValue;
    }

    if (type == SyntaxKind.NUMERIC_LITERAL) {
        SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind();
        if (literalTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
            typeTag = TypeTags.INT;
            value = getIntegerLiteral(literal, textValue, sign);
            originalValue = textValue;
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            // Hex integers within byte range are typed as byte.
            if (literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN && withinByteRange(value)) {
                typeTag = TypeTags.BYTE;
            }
        } else if (literalTokenKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN) {
            // A 'd' suffix discriminates decimal; otherwise this is a float.
            typeTag = NumericLiteralSupport.isDecimalDiscriminated(textValue) ? TypeTags.DECIMAL : TypeTags.FLOAT;
            if (isFiniteType) {
                // Finite-type members drop the f/d suffix and any '+' sign.
                value = textValue.replaceAll("[fd+]", "");
                originalValue = textValue.replace("+", "");
            } else {
                value = textValue;
                originalValue = textValue;
            }
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        } else if (literalTokenKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN) {
            typeTag = TypeTags.FLOAT;
            value = getHexNodeValue(textValue);
            originalValue = textValue;
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        }
    } else if (type == SyntaxKind.BOOLEAN_LITERAL) {
        typeTag = TypeTags.BOOLEAN;
        value = Boolean.parseBoolean(textValue);
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.STRING_LITERAL || type == SyntaxKind.XML_TEXT_CONTENT ||
            type == SyntaxKind.TEMPLATE_STRING || type == SyntaxKind.IDENTIFIER_TOKEN) {
        String text = textValue;
        // Strip the surrounding double quotes of a string literal; the close
        // quote may be absent in erroneous sources.
        if (type == SyntaxKind.STRING_LITERAL) {
            if (text.length() > 1 && text.charAt(text.length() - 1) == '"') {
                text = text.substring(1, text.length() - 1);
            } else {
                text = text.substring(1);
            }
        }
        String originalText = text;

        // Validate \u{...} escapes: surrogate-range and out-of-range code
        // points are reported as invalid, with the diagnostic positioned over
        // the offending escape; valid ones are rewritten to \uXXXX form.
        Matcher matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
        int position = 0;
        while (matcher.find(position)) {
            String hexStringVal = matcher.group(1);
            int hexDecimalVal = Integer.parseInt(hexStringVal, 16);
            if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE)
                    || hexDecimalVal > Constants.MAX_UNICODE) {
                String hexStringWithBraces = matcher.group(0);
                int offset = originalText.indexOf(hexStringWithBraces) + 1;
                Location pos = getPosition(literal);
                dlog.error(new BLangDiagnosticLocation(currentCompUnitName,
                                pos.lineRange().startLine().line(),
                                pos.lineRange().endLine().line(),
                                pos.lineRange().startLine().offset() + offset,
                                pos.lineRange().startLine().offset() + offset + hexStringWithBraces.length()),
                        DiagnosticErrorCode.INVALID_UNICODE, hexStringWithBraces);
            }
            text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal));
            position = matcher.end() - 2;
            matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
        }
        // Template strings and XML text keep their escapes verbatim.
        if (type != SyntaxKind.TEMPLATE_STRING && type != SyntaxKind.XML_TEXT_CONTENT) {
            text = StringEscapeUtils.unescapeJava(text);
        }

        typeTag = TypeTags.STRING;
        value = text;
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.NIL_LITERAL) {
        originalValue = "()";
        typeTag = TypeTags.NIL;
        value = null;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.NULL_LITERAL) {
        originalValue = "null";
        typeTag = TypeTags.NIL;
        value = null;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.BINARY_EXPRESSION) { // Should be base16 and base64
        typeTag = TypeTags.BYTE_ARRAY;
        value = textValue;
        originalValue = textValue;

        if (isNumericLiteral(type)) {
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        } else {
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        }
    } else if (type == SyntaxKind.BYTE_ARRAY_LITERAL) {
        return (BLangLiteral) literal.apply(this);
    }

    bLiteral.pos = getPosition(literal);
    bLiteral.type = symTable.getTypeFromTag(typeTag);
    bLiteral.type.tag = typeTag;
    bLiteral.value = value;
    bLiteral.originalValue = originalValue;
    return bLiteral;
}
/**
 * Creates a string literal whose value and original text are identical.
 */
private BLangLiteral createStringLiteral(String value, Location pos) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.originalValue = value;
    literal.type = symTable.stringType;
    literal.pos = pos;
    return literal;
}
/**
 * Transforms a type syntax node into a BLang type node: built-in simple
 * names and nil go through the built-in path, qualified/identifier references
 * become user-defined types, simple name references are unwrapped (or, when
 * they carry diagnostics, turned into a user-defined type directly), and
 * everything else dispatches through {@code apply}.
 */
private BLangType createTypeNode(Node type) {
    if (type instanceof BuiltinSimpleNameReferenceNode || type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        return createBuiltInTypeNode(type);
    } else if (type.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE || type.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
        // Exclusive type
        BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        BLangNameReference nameReference = createBLangNameReference(type);
        bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
        bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
        bLUserDefinedType.pos = getPosition(type);
        return bLUserDefinedType;
    } else if (type.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // A name reference with diagnostics is kept as a user-defined type
        // with an empty package alias rather than unwrapped further.
        if (type.hasDiagnostics()) {
            BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
            BLangIdentifier pkgAlias = this.createIdentifier(null, "");
            BLangIdentifier name = this.createIdentifier(((SimpleNameReferenceNode) type).name());
            BLangNameReference nameReference = new BLangNameReference(getPosition(type), null, pkgAlias, name);
            bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
            bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
            bLUserDefinedType.pos = getPosition(type);
            return bLUserDefinedType;
        }
        SimpleNameReferenceNode nameReferenceNode = (SimpleNameReferenceNode) type;
        return createTypeNode(nameReferenceNode.name());
    }
    return (BLangType) type.apply(this);
}
/**
 * Creates a type node for a built-in type descriptor. Returns {@code null}
 * for {@code var} (the caller marks the variable as declared-with-var), a
 * user-defined type with a generated name for a missing name token, a value
 * type for the basic kinds, and a built-in reference type otherwise.
 */
private BLangType createBuiltInTypeNode(Node type) {
    String typeText;
    if (type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        typeText = "()";
    } else if (type instanceof BuiltinSimpleNameReferenceNode) {
        BuiltinSimpleNameReferenceNode simpleNameRef = (BuiltinSimpleNameReferenceNode) type;
        if (simpleNameRef.kind() == SyntaxKind.VAR_TYPE_DESC) {
            return null;
        } else if (simpleNameRef.name().isMissing()) {
            // Recover from a missing type name with a generated placeholder.
            String name = missingNodesHelper.getNextMissingNodeName(packageID);
            BLangIdentifier identifier = createIdentifier(getPosition(simpleNameRef.name()), name);
            BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
            return createUserDefinedType(getPosition(type), pkgAlias, identifier);
        }
        typeText = simpleNameRef.name().text();
    } else {
        typeText = ((Token) type).text(); // TODO: Remove this once map<string> returns Nodes for `map`
    }
    TypeKind typeKind = TreeUtils.stringToTypeKind(typeText.replaceAll("\\s+", ""));

    SyntaxKind kind = type.kind();
    switch (kind) {
        case BOOLEAN_TYPE_DESC:
        case INT_TYPE_DESC:
        case BYTE_TYPE_DESC:
        case FLOAT_TYPE_DESC:
        case DECIMAL_TYPE_DESC:
        case STRING_TYPE_DESC:
        case ANY_TYPE_DESC:
        case NIL_TYPE_DESC:
        case HANDLE_TYPE_DESC:
        case ANYDATA_TYPE_DESC:
        case READONLY_TYPE_DESC:
            BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            valueType.typeKind = typeKind;
            valueType.pos = getPosition(type);
            return valueType;
        default:
            BLangBuiltInRefTypeNode builtInValueType =
                    (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
            builtInValueType.typeKind = typeKind;
            builtInValueType.pos = getPosition(type);
            return builtInValueType;
    }
}
/**
 * Builds a simple variable node with a name and optional initializer but no
 * declared type.
 */
private VariableNode createBasicVarNodeWithoutType(Location location, Set<Whitespace> ws,
                                                   String identifier, Location identifierLocation,
                                                   ExpressionNode expr) {
    BLangSimpleVariable variable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    variable.pos = location;
    IdentifierNode name = this.createIdentifier(identifierLocation, identifier, ws);
    ((BLangIdentifier) name).pos = identifierLocation;
    variable.setName(name);
    variable.addWS(ws);
    if (expr != null) {
        variable.setInitialExpression(expr);
    }
    return variable;
}
/**
 * Builds an invocation node for the given callee and arguments. Async
 * invocations become action invocations; others become plain calls.
 */
private BLangInvocation createBLangInvocation(Node nameNode, NodeList<FunctionArgumentNode> arguments,
                                              Location position, boolean isAsync) {
    BLangInvocation invocation = isAsync
            ? (BLangInvocation) TreeBuilder.createActionInvocation()
            : (BLangInvocation) TreeBuilder.createInvocationNode();

    BLangNameReference reference = createBLangNameReference(nameNode);
    invocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
    invocation.name = (BLangIdentifier) reference.name;

    List<BLangExpression> args = new ArrayList<>();
    for (FunctionArgumentNode arg : arguments) {
        args.add(createExpression(arg));
    }
    invocation.argExprs = args;
    invocation.pos = position;
    return invocation;
}
/**
 * Creates a name reference (package alias + name) from a reference-like
 * syntax node. Qualified references split into alias and name; all other
 * supported kinds reduce to a single token with an empty alias.
 */
private BLangNameReference createBLangNameReference(Node node) {
    switch (node.kind()) {
        case QUALIFIED_NAME_REFERENCE:
            // prefix:name — both parts become identifiers.
            QualifiedNameReferenceNode iNode = (QualifiedNameReferenceNode) node;
            Token modulePrefix = iNode.modulePrefix();
            IdentifierToken identifier = iNode.identifier();
            BLangIdentifier pkgAlias = this.createIdentifier(getPosition(modulePrefix), modulePrefix);
            Location namePos = getPosition(identifier);
            BLangIdentifier name = this.createIdentifier(namePos, identifier);
            return new BLangNameReference(getPosition(node), null, pkgAlias, name);
        case ERROR_TYPE_DESC:
            // Unwrap to the `error` keyword token.
            node = ((BuiltinSimpleNameReferenceNode) node).name();
            break;
        case NEW_KEYWORD:
        case IDENTIFIER_TOKEN:
        case ERROR_KEYWORD:
            // Already a token; use it as-is.
            break;
        case SIMPLE_NAME_REFERENCE:
        default:
            node = ((SimpleNameReferenceNode) node).name();
            break;
    }

    // Single-token reference: empty package alias.
    Token iToken = (Token) node;
    BLangIdentifier pkgAlias = this.createIdentifier(symTable.builtinPos, "");
    BLangIdentifier name = this.createIdentifier(iToken);
    return new BLangNameReference(getPosition(node), null, pkgAlias, name);
}
private BLangMarkdownDocumentation createMarkdownDocumentationAttachment(Optional<Node> markdownDocumentationNode) {
if (markdownDocumentationNode == null || !markdownDocumentationNode.isPresent()) {
return null;
}
BLangMarkdownDocumentation doc = (BLangMarkdownDocumentation) TreeBuilder.createMarkdownDocumentationNode();
LinkedList<BLangMarkdownDocumentationLine> documentationLines = new LinkedList<>();
LinkedList<BLangMarkdownParameterDocumentation> parameters = new LinkedList<>();
LinkedList<BLangMarkdownReferenceDocumentation> references = new LinkedList<>();
MarkdownDocumentationNode markdownDocNode = (MarkdownDocumentationNode) markdownDocumentationNode.get();
NodeList<Node> docLineList = markdownDocNode.documentationLines();
BLangMarkdownParameterDocumentation bLangParaDoc = null;
BLangMarkdownReturnParameterDocumentation bLangReturnParaDoc = null;
BLangMarkDownDeprecationDocumentation bLangDeprecationDoc = null;
BLangMarkDownDeprecatedParametersDocumentation bLangDeprecatedParaDoc = null;
for (Node singleDocLine : docLineList) {
switch (singleDocLine.kind()) {
case MARKDOWN_DOCUMENTATION_LINE:
case MARKDOWN_REFERENCE_DOCUMENTATION_LINE:
MarkdownDocumentationLineNode docLineNode = (MarkdownDocumentationLineNode) singleDocLine;
NodeList<Node> docElements = docLineNode.documentElements();
String docText = addReferencesAndReturnDocumentationText(references, docElements);
if (bLangDeprecationDoc != null) {
bLangDeprecationDoc.deprecationDocumentationLines.add(docText);
} else if (bLangReturnParaDoc != null) {
bLangReturnParaDoc.returnParameterDocumentationLines.add(docText);
} else if (bLangParaDoc != null) {
bLangParaDoc.parameterDocumentationLines.add(docText);
} else {
BLangMarkdownDocumentationLine bLangDocLine =
(BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();
bLangDocLine.text = docText;
bLangDocLine.pos = getPosition(docLineNode);
documentationLines.add(bLangDocLine);
}
break;
case MARKDOWN_PARAMETER_DOCUMENTATION_LINE:
bLangParaDoc = new BLangMarkdownParameterDocumentation();
MarkdownParameterDocumentationLineNode parameterDocLineNode =
(MarkdownParameterDocumentationLineNode) singleDocLine;
BLangIdentifier paraName = new BLangIdentifier();
Token parameterName = parameterDocLineNode.parameterName();
paraName.value = parameterName.isMissing() ? "" : parameterName.text();
bLangParaDoc.parameterName = paraName;
NodeList<Node> paraDocElements = parameterDocLineNode.documentElements();
String paraDocText = addReferencesAndReturnDocumentationText(references, paraDocElements);
bLangParaDoc.parameterDocumentationLines.add(paraDocText);
bLangParaDoc.pos = getPosition(parameterName);
if (bLangDeprecatedParaDoc != null) {
bLangDeprecatedParaDoc.parameters.add(bLangParaDoc);
} else if (bLangDeprecationDoc != null) {
bLangDeprecatedParaDoc =
new BLangMarkDownDeprecatedParametersDocumentation();
bLangDeprecatedParaDoc.parameters.add(bLangParaDoc);
bLangDeprecationDoc = null;
} else {
parameters.add(bLangParaDoc);
}
break;
case MARKDOWN_RETURN_PARAMETER_DOCUMENTATION_LINE:
bLangReturnParaDoc = new BLangMarkdownReturnParameterDocumentation();
MarkdownParameterDocumentationLineNode returnParaDocLineNode =
(MarkdownParameterDocumentationLineNode) singleDocLine;
NodeList<Node> returnParaDocElements = returnParaDocLineNode.documentElements();
String returnParaDocText =
addReferencesAndReturnDocumentationText(references, returnParaDocElements);
bLangReturnParaDoc.returnParameterDocumentationLines.add(returnParaDocText);
bLangReturnParaDoc.pos = getPosition(returnParaDocLineNode);
doc.returnParameter = bLangReturnParaDoc;
break;
case MARKDOWN_DEPRECATION_DOCUMENTATION_LINE:
bLangDeprecationDoc = new BLangMarkDownDeprecationDocumentation();
MarkdownDocumentationLineNode deprecationDocLineNode =
(MarkdownDocumentationLineNode) singleDocLine;
String lineText = ((Token) deprecationDocLineNode.documentElements().get(0)).text();
bLangDeprecationDoc.addDeprecationLine("
bLangDeprecationDoc.pos = getPosition(deprecationDocLineNode);
break;
case MARKDOWN_CODE_BLOCK:
MarkdownCodeBlockNode codeBlockNode = (MarkdownCodeBlockNode) singleDocLine;
transformCodeBlock(documentationLines, codeBlockNode);
break;
default:
break;
}
}
doc.documentationLines = documentationLines;
doc.parameters = parameters;
doc.references = references;
doc.deprecationDocumentation = bLangDeprecationDoc;
doc.deprecatedParametersDocumentation = bLangDeprecatedParaDoc;
doc.pos = getPosition(markdownDocNode);
return doc;
}
/**
 * Converts a markdown code block into a single documentation line that
 * reproduces the block's source text (backticks, optional language
 * attribute, code lines, and the closing hash/backtick).
 */
private void transformCodeBlock(LinkedList<BLangMarkdownDocumentationLine> documentationLines,
                                MarkdownCodeBlockNode codeBlockNode) {
    BLangMarkdownDocumentationLine bLangDocLine =
            (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();

    StringBuilder docText = new StringBuilder();

    if (codeBlockNode.langAttribute().isPresent()) {
        // With a language attribute, keep the opening backticks' text plus
        // the attribute (including its trailing minutiae via toString()).
        docText.append(codeBlockNode.startBacktick().text());
        docText.append(codeBlockNode.langAttribute().get().toString());
    } else {
        docText.append(codeBlockNode.startBacktick().toString());
    }

    codeBlockNode.codeLines().forEach(codeLine -> docText.append(codeLine.toString()));

    docText.append(codeBlockNode.endLineHashToken().toString());
    docText.append(codeBlockNode.endBacktick().text());

    bLangDocLine.text = docText.toString();
    bLangDocLine.pos = getPosition(codeBlockNode.startLineHashToken());
    documentationLines.add(bLangDocLine);
}
/**
 * Flattens a documentation line's elements into text, collecting any
 * backtick references (e.g. {@code function `foo()`}) into {@code references}
 * along the way. At most one leading whitespace character is stripped from
 * the result.
 */
private String addReferencesAndReturnDocumentationText(LinkedList<BLangMarkdownReferenceDocumentation> references,
                                                       NodeList<Node> docElements) {
    StringBuilder docText = new StringBuilder();
    for (Node element : docElements) {
        if (element.kind() == SyntaxKind.BALLERINA_NAME_REFERENCE) {
            BLangMarkdownReferenceDocumentation bLangRefDoc = new BLangMarkdownReferenceDocumentation();
            BallerinaNameReferenceNode balNameRefNode = (BallerinaNameReferenceNode) element;

            bLangRefDoc.pos = getPosition(balNameRefNode);

            Token startBacktick = balNameRefNode.startBacktick();
            Node backtickContent = balNameRefNode.nameReference();
            Token endBacktick = balNameRefNode.endBacktick();

            String contentString = backtickContent.isMissing() ? "" : backtickContent.toString();
            bLangRefDoc.referenceName = contentString;

            // Default reference type; replaced below when an explicit
            // reference-type keyword (e.g. `function`) is present.
            bLangRefDoc.type = DocumentationReferenceType.BACKTICK_CONTENT;
            Optional<Token> referenceType = balNameRefNode.referenceType();
            referenceType.ifPresent(
                    refType -> {
                        bLangRefDoc.type = stringToRefType(refType.text());
                        docText.append(refType.toString());
                    }
            );

            transformDocumentationBacktickContent(backtickContent, bLangRefDoc);

            docText.append(startBacktick.isMissing() ? "" : startBacktick.text());
            docText.append(contentString);
            docText.append(endBacktick.isMissing() ? "" : endBacktick.text());
            references.add(bLangRefDoc);
        } else if (element.kind() == SyntaxKind.DOCUMENTATION_DESCRIPTION) {
            Token docDescription = (Token) element;
            docText.append(docDescription.text());
        } else if (element.kind() == SyntaxKind.INLINE_CODE_REFERENCE) {
            InlineCodeReferenceNode inlineCodeRefNode = (InlineCodeReferenceNode) element;
            docText.append(inlineCodeRefNode.startBacktick().text());
            docText.append(inlineCodeRefNode.codeReference().text());
            docText.append(inlineCodeRefNode.endBacktick().text());
        }
    }

    return trimLeftAtMostOne(docText.toString());
}
/**
 * Strips a single leading whitespace character from the text, if present.
 */
private String trimLeftAtMostOne(String text) {
    if (text.isEmpty() || !Character.isWhitespace(text.charAt(0))) {
        return text;
    }
    return text.substring(1);
}
/**
 * Extracts the qualifier/identifier/type-name parts from backtick content
 * (a name reference, function call, or method call) into the given reference
 * documentation node. Unparsable content (CODE_CONTENT) only sets a warning
 * flag; any other kind is a programming error.
 *
 * @throws IllegalArgumentException for unsupported backtick content kinds
 */
private void transformDocumentationBacktickContent(Node backtickContent,
                                                   BLangMarkdownReferenceDocumentation bLangRefDoc) {
    QualifiedNameReferenceNode qualifiedRef;
    SimpleNameReferenceNode simpleRef;

    switch (backtickContent.kind()) {
        case CODE_CONTENT:
            // Content the parser could not classify; flag it for later checks.
            bLangRefDoc.hasParserWarnings = true;
            break;
        case QUALIFIED_NAME_REFERENCE:
            // `module:name`
            qualifiedRef = (QualifiedNameReferenceNode) backtickContent;
            bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
            bLangRefDoc.identifier = qualifiedRef.identifier().text();
            break;
        case SIMPLE_NAME_REFERENCE:
            // `name`
            simpleRef = (SimpleNameReferenceNode) backtickContent;
            bLangRefDoc.identifier = simpleRef.name().text();
            break;
        case FUNCTION_CALL:
            // `foo()` or `module:foo()`
            Node funcName = (((FunctionCallExpressionNode) backtickContent).functionName());
            if (funcName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                qualifiedRef = (QualifiedNameReferenceNode) funcName;
                bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                bLangRefDoc.identifier = qualifiedRef.identifier().text();
            } else {
                simpleRef = (SimpleNameReferenceNode) funcName;
                bLangRefDoc.identifier = simpleRef.name().text();
            }
            break;
        case METHOD_CALL:
            // `T.foo()` or `module:T.foo()` — method name plus receiver type.
            MethodCallExpressionNode methodCallExprNode = (MethodCallExpressionNode) backtickContent;
            bLangRefDoc.identifier =
                    ((SimpleNameReferenceNode) methodCallExprNode.methodName()).name().text();
            Node refName = methodCallExprNode.expression();
            if (refName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                qualifiedRef = (QualifiedNameReferenceNode) refName;
                bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                bLangRefDoc.typeName = qualifiedRef.identifier().text();
            } else {
                simpleRef = (SimpleNameReferenceNode) refName;
                bLangRefDoc.typeName = simpleRef.name().text();
            }
            break;
        default:
            throw new IllegalArgumentException("Invalid backtick content transformation");
    }
}
/**
 * Maps a documentation reference-type keyword (e.g. "function", "type") to
 * its {@link DocumentationReferenceType}; unknown keywords fall back to
 * {@code BACKTICK_CONTENT}.
 */
private DocumentationReferenceType stringToRefType(String refTypeName) {
    switch (refTypeName) {
        case "type":
            return DocumentationReferenceType.TYPE;
        case "service":
            return DocumentationReferenceType.SERVICE;
        case "variable":
            return DocumentationReferenceType.VARIABLE;
        case "var":
            return DocumentationReferenceType.VAR;
        case "annotation":
            return DocumentationReferenceType.ANNOTATION;
        case "module":
            return DocumentationReferenceType.MODULE;
        case "function":
            return DocumentationReferenceType.FUNCTION;
        case "parameter":
            return DocumentationReferenceType.PARAMETER;
        case "const":
            return DocumentationReferenceType.CONST;
        default:
            return DocumentationReferenceType.BACKTICK_CONTENT;
    }
}
/**
 * Parses a decimal or hex integer literal token into a Long (or, on range
 * error, logs a diagnostic and returns the original text). Returns
 * {@code null} for any other token kind.
 */
private Object getIntegerLiteral(Node literal, String nodeValue, SyntaxKind sign) {
    SyntaxKind tokenKind = ((BasicLiteralNode) literal).literalToken().kind();
    switch (tokenKind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
            return parseLong(literal, nodeValue, nodeValue, 10, sign, DiagnosticErrorCode.INTEGER_TOO_SMALL,
                    DiagnosticErrorCode.INTEGER_TOO_LARGE);
        case HEX_INTEGER_LITERAL_TOKEN: {
            // Strip the 0x/0X prefix before parsing in base 16.
            String hexDigits = nodeValue.toLowerCase().replace("0x", "");
            return parseLong(literal, nodeValue, hexDigits, 16, sign,
                    DiagnosticErrorCode.HEXADECIMAL_TOO_SMALL, DiagnosticErrorCode.HEXADECIMAL_TOO_LARGE);
        }
        default:
            return null;
    }
}
/**
 * Parses {@code processedNodeValue} as a long in the given radix.
 * On success the parsed {@code Long} is returned; on failure an
 * out-of-range diagnostic is logged ({@code code1} for a negated literal,
 * {@code code2} otherwise) and the original source text is returned
 * unchanged so downstream phases still have a value to carry.
 */
private Object parseLong(Node literal, String originalNodeValue,
                         String processedNodeValue, int radix, SyntaxKind sign,
                         DiagnosticCode code1, DiagnosticCode code2) {
    try {
        return Long.parseLong(processedNodeValue, radix);
    } catch (NumberFormatException e) {
        // Long.parseLong only throws NumberFormatException; catching the
        // narrow type avoids masking unrelated programming errors.
        // NOTE(review): a literal of exactly Long.MIN_VALUE magnitude
        // ("-9223372036854775808") also lands here since the digits alone
        // overflow — confirm the caller folds the sign before parsing.
        Location pos = getPosition(literal);
        if (sign == SyntaxKind.MINUS_TOKEN) {
            // Widen the reported range one column left so it covers the
            // preceding '-' token as well.
            pos = new BLangDiagnosticLocation(pos.lineRange().filePath(),
                    pos.lineRange().startLine().line(),
                    pos.lineRange().endLine().line(),
                    pos.lineRange().startLine().offset() - 1,
                    pos.lineRange().endLine().offset());
            dlog.error(pos, code1, originalNodeValue);
        } else {
            dlog.error(pos, code2, originalNodeValue);
        }
    }
    return originalNodeValue;
}
private String getHexNodeValue(String value) {
    // A hex floating-point literal needs an explicit binary exponent;
    // append a neutral "p0" when neither 'p' nor 'P' is present.
    boolean hasExponent = value.contains("p") || value.contains("P");
    return hasExponent ? value : value + "p0";
}
/**
 * Left-pads the given string with '0' characters up to a minimum width of
 * four; longer strings pass through unchanged. Uses a StringBuilder instead
 * of the original repeated String#concat, which reallocated the whole
 * string on every iteration.
 */
private String fillWithZeros(String str) {
    StringBuilder padded = new StringBuilder(str);
    while (padded.length() < 4) {
        padded.insert(0, '0');
    }
    return padded.toString();
}
// Adds the flag to this variable and, for structured binding patterns
// (tuple/record/error), propagates it recursively into every nested
// member, rest, and detail variable.
private void markVariableWithFlag(BLangVariable variable, Flag flag) {
    variable.flagSet.add(flag);
    switch (variable.getKind()) {
        case TUPLE_VARIABLE:
            BLangTupleVariable tupleVar = (BLangTupleVariable) variable;
            for (BLangVariable member : tupleVar.memberVariables) {
                markVariableWithFlag(member, flag);
            }
            if (tupleVar.restVariable != null) {
                markVariableWithFlag(tupleVar.restVariable, flag);
            }
            break;
        case RECORD_VARIABLE:
            BLangRecordVariable recordVar = (BLangRecordVariable) variable;
            for (BLangRecordVariableKeyValue keyValue : recordVar.variableList) {
                markVariableWithFlag(keyValue.getValue(), flag);
            }
            if (recordVar.restParam != null) {
                markVariableWithFlag((BLangVariable) recordVar.restParam, flag);
            }
            break;
        case ERROR_VARIABLE:
            BLangErrorVariable errorVar = (BLangErrorVariable) variable;
            if (errorVar.message != null) {
                markVariableWithFlag(errorVar.message, flag);
            }
            if (errorVar.cause != null) {
                markVariableWithFlag(errorVar.cause, flag);
            }
            errorVar.detail.forEach(entry -> markVariableWithFlag(entry.valueBindingPattern, flag));
            if (errorVar.restDetail != null) {
                markVariableWithFlag(errorVar.restDetail, flag);
            }
            break;
    }
}
private boolean isSimpleLiteral(SyntaxKind syntaxKind) {
    // True only for the five literal kinds the transformer treats as
    // simple literals.
    return syntaxKind == SyntaxKind.STRING_LITERAL
            || syntaxKind == SyntaxKind.NUMERIC_LITERAL
            || syntaxKind == SyntaxKind.BOOLEAN_LITERAL
            || syntaxKind == SyntaxKind.NIL_LITERAL
            || syntaxKind == SyntaxKind.NULL_LITERAL;
}
// True for every syntax kind that represents a type descriptor.
static boolean isType(SyntaxKind nodeKind) {
    return nodeKind == SyntaxKind.RECORD_TYPE_DESC
            || nodeKind == SyntaxKind.OBJECT_TYPE_DESC
            || nodeKind == SyntaxKind.NIL_TYPE_DESC
            || nodeKind == SyntaxKind.OPTIONAL_TYPE_DESC
            || nodeKind == SyntaxKind.ARRAY_TYPE_DESC
            || nodeKind == SyntaxKind.INT_TYPE_DESC
            || nodeKind == SyntaxKind.BYTE_TYPE_DESC
            || nodeKind == SyntaxKind.FLOAT_TYPE_DESC
            || nodeKind == SyntaxKind.DECIMAL_TYPE_DESC
            || nodeKind == SyntaxKind.STRING_TYPE_DESC
            || nodeKind == SyntaxKind.BOOLEAN_TYPE_DESC
            || nodeKind == SyntaxKind.XML_TYPE_DESC
            || nodeKind == SyntaxKind.JSON_TYPE_DESC
            || nodeKind == SyntaxKind.HANDLE_TYPE_DESC
            || nodeKind == SyntaxKind.ANY_TYPE_DESC
            || nodeKind == SyntaxKind.ANYDATA_TYPE_DESC
            || nodeKind == SyntaxKind.NEVER_TYPE_DESC
            || nodeKind == SyntaxKind.VAR_TYPE_DESC
            || nodeKind == SyntaxKind.SERVICE_TYPE_DESC
            || nodeKind == SyntaxKind.PARAMETERIZED_TYPE_DESC
            || nodeKind == SyntaxKind.UNION_TYPE_DESC
            || nodeKind == SyntaxKind.ERROR_TYPE_DESC
            || nodeKind == SyntaxKind.STREAM_TYPE_DESC
            || nodeKind == SyntaxKind.TABLE_TYPE_DESC
            || nodeKind == SyntaxKind.FUNCTION_TYPE_DESC
            || nodeKind == SyntaxKind.TUPLE_TYPE_DESC
            || nodeKind == SyntaxKind.PARENTHESISED_TYPE_DESC
            || nodeKind == SyntaxKind.READONLY_TYPE_DESC
            || nodeKind == SyntaxKind.DISTINCT_TYPE_DESC
            || nodeKind == SyntaxKind.INTERSECTION_TYPE_DESC
            || nodeKind == SyntaxKind.SINGLETON_TYPE_DESC
            || nodeKind == SyntaxKind.TYPE_REFERENCE_TYPE_DESC;
}
private boolean isNumericLiteral(SyntaxKind syntaxKind) {
    // Direct comparison; the single-case switch was redundant.
    return syntaxKind == SyntaxKind.NUMERIC_LITERAL;
}
private boolean isPresent(Node node) {
    // A node is "present" unless its kind is the NONE placeholder.
    return SyntaxKind.NONE != node.kind();
}
private boolean checkIfAnonymous(Node node) {
    // A descriptor directly under a type definition (or a distinct-type
    // wrapper) is named; everything else counts as anonymous.
    SyntaxKind parentKind = node.parent().kind();
    if (parentKind == SyntaxKind.DISTINCT_TYPE_DESC || parentKind == SyntaxKind.TYPE_DEFINITION) {
        return false;
    }
    return true;
}
private boolean ifInLocalContext(Node parent) {
    // Walks up the syntax tree; a StatementNode ancestor means this node
    // lives inside a local (statement-level) context.
    for (Node ancestor = parent; ancestor != null; ancestor = ancestor.parent()) {
        if (ancestor instanceof StatementNode) {
            return true;
        }
    }
    return false;
}
// Wraps an inline record type descriptor in a generated, public, anonymous
// module-level type definition and returns a type reference to it.
private BLangType createAnonymousRecordType(RecordTypeDescriptorNode recordTypeDescriptorNode,
                                            BLangRecordTypeNode recordTypeNode) {
    Location pos = getPosition(recordTypeDescriptorNode);
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    String anonTypeName = anonymousModelHelper.getNextAnonymousTypeKey(this.packageID);
    typeDef.setName(createIdentifier(pos, anonTypeName, null));
    typeDef.flagSet.add(Flag.PUBLIC);
    typeDef.flagSet.add(Flag.ANONYMOUS);
    typeDef.typeNode = recordTypeNode;
    typeDef.pos = pos;
    addToTop(typeDef);
    // Reference the generated definition with an empty package alias.
    return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name);
}
// Builds a user-defined type reference node (pkgAlias:name) at the given
// position.
private BLangUserDefinedType createUserDefinedType(Location pos,
                                                   BLangIdentifier pkgAlias,
                                                   BLangIdentifier name) {
    BLangUserDefinedType typeRef = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    typeRef.pos = pos;
    typeRef.pkgAlias = pkgAlias;
    typeRef.typeName = name;
    return typeRef;
}
private boolean withinByteRange(Object num) {
    // Only boxed Long values are considered; the value must fit in [0, 255].
    if (!(num instanceof Long)) {
        return false;
    }
    long value = (Long) num;
    return value >= 0 && value <= 255;
}
/**
 * Fluent builder that assembles a {@link BLangSimpleVariable} from a name,
 * an optional type node and initializer, and a set of modifier flags.
 */
private class SimpleVarBuilder {
    private BLangIdentifier name;
    private BLangType type;
    private boolean isDeclaredWithVar;
    private Set<Flag> flags = new HashSet<>();
    private boolean isFinal;
    private ExpressionNode expr;
    private Location pos;

    /**
     * Materializes the configured state into a new simple variable node.
     */
    public BLangSimpleVariable build() {
        BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        bLSimpleVar.setName(this.name);
        // Set the type node once (the previous implementation redundantly
        // called setTypeNode twice with the same value).
        bLSimpleVar.setTypeNode(this.type);
        bLSimpleVar.isDeclaredWithVar = this.isDeclaredWithVar;
        bLSimpleVar.flagSet.addAll(this.flags);
        if (this.isFinal) {
            // FINAL is propagated recursively into any nested binding patterns.
            markVariableWithFlag(bLSimpleVar, Flag.FINAL);
        }
        bLSimpleVar.setInitialExpression(this.expr);
        bLSimpleVar.pos = pos;
        return bLSimpleVar;
    }

    /** Sets the variable name from a raw string (no position). */
    public SimpleVarBuilder with(String name) {
        this.name = createIdentifier(null, name);
        return this;
    }

    /** Sets the variable name from a raw string with an explicit position. */
    public SimpleVarBuilder with(String name, Location identifierPos) {
        this.name = createIdentifier(identifierPos, name);
        return this;
    }

    /** Sets the variable name from an identifier token. */
    public SimpleVarBuilder with(Token token) {
        this.name = createIdentifier(token);
        return this;
    }

    /**
     * Resolves the type from a syntax node; a null node or an explicit
     * `var` descriptor marks the variable as declared with `var`.
     */
    public SimpleVarBuilder setTypeByNode(Node typeName) {
        this.isDeclaredWithVar = typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC;
        if (typeName == null) {
            return this;
        }
        this.type = createTypeNode(typeName);
        return this;
    }

    /** Sets the initializer from a syntax node; null clears it. */
    public SimpleVarBuilder setExpressionByNode(Node initExprNode) {
        this.expr = initExprNode != null ? createExpression(initExprNode) : null;
        return this;
    }

    /** Sets the initializer from an already-built expression. */
    public SimpleVarBuilder setExpression(ExpressionNode expression) {
        this.expr = expression;
        return this;
    }

    /** Marks the variable as declared with `var`. */
    public SimpleVarBuilder isDeclaredWithVar() {
        this.isDeclaredWithVar = true;
        return this;
    }

    /** Marks the variable as final. */
    public SimpleVarBuilder isFinal() {
        this.isFinal = true;
        return this;
    }

    /** Listener variables are implicitly final. */
    public SimpleVarBuilder isListenerVar() {
        this.flags.add(Flag.LISTENER);
        this.flags.add(Flag.FINAL);
        return this;
    }

    /** Applies the PRIVATE/PUBLIC flag from a visibility qualifier token, if any. */
    public SimpleVarBuilder setVisibility(Token visibilityQualifier) {
        if (visibilityQualifier != null) {
            if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
                this.flags.add(Flag.PRIVATE);
            } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
                this.flags.add(Flag.PUBLIC);
            }
        }
        return this;
    }

    /** Sets or clears the final marker. */
    public SimpleVarBuilder setFinal(boolean present) {
        this.isFinal = present;
        return this;
    }

    // NOTE(review): toggles the PUBLIC flag rather than an OPTIONAL-style
    // flag — looks surprising for a method named setOptional; confirm
    // against callers before changing.
    public SimpleVarBuilder setOptional(boolean present) {
        if (present) {
            this.flags.add(Flag.PUBLIC);
        } else {
            this.flags.remove(Flag.PUBLIC);
        }
        return this;
    }

    /** Sets or clears the REQUIRED flag. */
    public SimpleVarBuilder setRequired(boolean present) {
        if (present) {
            this.flags.add(Flag.REQUIRED);
        } else {
            this.flags.remove(Flag.REQUIRED);
        }
        return this;
    }

    /** Adds the PUBLIC flag. */
    public SimpleVarBuilder isPublic() {
        this.flags.add(Flag.PUBLIC);
        return this;
    }

    /** Adds the WORKER flag. */
    public SimpleVarBuilder isWorkerVar() {
        this.flags.add(Flag.WORKER);
        return this;
    }

    /** Sets the source position of the variable node. */
    public SimpleVarBuilder setPos(Location pos) {
        this.pos = pos;
        return this;
    }
}
// Marks the given simple variable as final.
private void addFinalQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.FINAL);
}
// Appends a top-level node to the compilation unit currently being built;
// silently ignored when no compilation unit is active.
private void addToTop(TopLevelNode topLevelNode) {
    if (currentCompilationUnit != null) {
        currentCompilationUnit.addTopLevelNode(topLevelNode);
    }
}
// Widens the location's start boundary leftwards to upTo's start position;
// the end boundary is kept. The assert checks that upTo does not start
// after the original location.
private Location expandLeft(Location location, Location upTo) {
    assert location.lineRange().startLine().line() > upTo.lineRange().startLine().line() ||
            (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() &&
            location.lineRange().startLine().offset() >= upTo.lineRange().startLine().offset());
    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            upTo.lineRange().startLine().line(),
            location.lineRange().endLine().line(),
            upTo.lineRange().startLine().offset(),
            location.lineRange().endLine().offset());
}
// Moves the location's start boundary rightwards to upTo's start position;
// the end boundary is kept. The assert checks that upTo does not start
// before the original location.
private Location trimLeft(Location location, Location upTo) {
    assert location.lineRange().startLine().line() < upTo.lineRange().startLine().line() ||
            (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() &&
            location.lineRange().startLine().offset() <= upTo.lineRange().startLine().offset());
    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            upTo.lineRange().startLine().line(),
            location.lineRange().endLine().line(),
            upTo.lineRange().startLine().offset(),
            location.lineRange().endLine().offset());
}
// Moves the location's end boundary leftwards to upTo's end position; the
// start boundary is kept. The assert checks that upTo does not end after
// the original location.
private Location trimRight(Location location, Location upTo) {
    assert location.lineRange().endLine().line() > upTo.lineRange().endLine().line() ||
            (location.lineRange().endLine().line() == upTo.lineRange().endLine().line() &&
            location.lineRange().endLine().offset() >= upTo.lineRange().endLine().offset());
    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            location.lineRange().startLine().line(),
            upTo.lineRange().endLine().line(),
            location.lineRange().startLine().offset(),
            upTo.lineRange().endLine().offset());
}
} |
Can't/shouldn't we call `analyzeNode` instead of directly calling `accept`? | public void visit(BLangMappingMatchPattern mappingMatchPattern) {
BRecordTypeSymbol recordSymbol =
Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++),
env.enclPkg.symbol.pkgID, null, env.scope.owner, mappingMatchPattern.pos, VIRTUAL);
LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
for (BLangFieldMatchPattern fieldMatchPattern : mappingMatchPattern.fieldMatchPatterns) {
fieldMatchPattern.accept(this);
String fieldName = fieldMatchPattern.fieldName.value;
BVarSymbol fieldSymbol = new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID,
fieldMatchPattern.matchPattern.type, recordSymbol, fieldMatchPattern.pos, COMPILED_SOURCE);
BField field = new BField(names.fromString(fieldName), fieldMatchPattern.pos, fieldSymbol);
fields.put(fieldName, field);
mappingMatchPattern.declaredVars.putAll(fieldMatchPattern.declaredVars);
}
BRecordType recordVarType = new BRecordType(recordSymbol);
recordVarType.fields = fields;
recordVarType.restFieldType = symTable.anydataType;
if (mappingMatchPattern.restMatchPattern != null) {
BLangRestMatchPattern restMatchPattern = mappingMatchPattern.restMatchPattern;
restMatchPattern.type = new BMapType(TypeTags.MAP, symTable.anydataType, null);
restMatchPattern.accept(this);
mappingMatchPattern.declaredVars.put(restMatchPattern.variableName.value, restMatchPattern.symbol);
}
mappingMatchPattern.type = types.resolvePatternTypeFromMatchExpr(mappingMatchPattern,
recordVarType, env);
assignTypesToMemberPatterns(mappingMatchPattern, mappingMatchPattern.type);
} | fieldMatchPattern.accept(this); | public void visit(BLangMappingMatchPattern mappingMatchPattern) {
EnumSet<Flag> flags = EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS);
BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(flags), Names.EMPTY,
env.enclPkg.packageID, null, env.scope.owner, mappingMatchPattern.pos, VIRTUAL);
recordSymbol.name = names.fromString(anonModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
for (BLangFieldMatchPattern fieldMatchPattern : mappingMatchPattern.fieldMatchPatterns) {
analyzeNode(fieldMatchPattern, env);
String fieldName = fieldMatchPattern.fieldName.value;
BVarSymbol fieldSymbol = new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID,
fieldMatchPattern.matchPattern.type, recordSymbol, fieldMatchPattern.pos, COMPILED_SOURCE);
BField field = new BField(names.fromString(fieldName), fieldMatchPattern.pos, fieldSymbol);
fields.put(fieldName, field);
mappingMatchPattern.declaredVars.putAll(fieldMatchPattern.declaredVars);
}
BRecordType recordVarType = new BRecordType(recordSymbol);
recordVarType.fields = fields;
recordVarType.restFieldType = symTable.anydataType;
if (mappingMatchPattern.restMatchPattern != null) {
BLangRestMatchPattern restMatchPattern = mappingMatchPattern.restMatchPattern;
restMatchPattern.type = new BMapType(TypeTags.MAP, symTable.anydataType, null);
analyzeNode(restMatchPattern, env);
mappingMatchPattern.declaredVars.put(restMatchPattern.variableName.value, restMatchPattern.symbol);
}
mappingMatchPattern.type = types.resolvePatternTypeFromMatchExpr(mappingMatchPattern,
recordVarType, env);
assignTypesToMemberPatterns(mappingMatchPattern, mappingMatchPattern.type);
} | class SemanticAnalyzer extends BLangNodeVisitor {
private static final CompilerContext.Key<SemanticAnalyzer> SYMBOL_ANALYZER_KEY =
new CompilerContext.Key<>();
private static final String ANONYMOUS_RECORD_NAME = "anonymous-record";
private static final String NULL_LITERAL = "null";
private static final String LEFT_BRACE = "{";
private static final String RIGHT_BRACE = "}";
private static final String SPACE = " ";
public static final String COLON = ":";
private static final String LISTENER_TYPE_NAME = "lang.object:Listener";
private static final String LISTENER_NAME = "listener";
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private Names names;
private SymbolResolver symResolver;
private TypeChecker typeChecker;
private Types types;
private BLangDiagnosticLog dlog;
private TypeNarrower typeNarrower;
private ConstantAnalyzer constantAnalyzer;
private ConstantValueResolver constantValueResolver;
private SymbolEnv env;
private BType expType;
private DiagnosticCode diagCode;
private BType resType;
private Map<BVarSymbol, BType.NarrowedTypes> narrowedTypeInfo;
private Stack<SymbolEnv> prevEnvs = new Stack<>();
private int recordCount = 0;
public static SemanticAnalyzer getInstance(CompilerContext context) {
SemanticAnalyzer semAnalyzer = context.get(SYMBOL_ANALYZER_KEY);
if (semAnalyzer == null) {
semAnalyzer = new SemanticAnalyzer(context);
}
return semAnalyzer;
}
public SemanticAnalyzer(CompilerContext context) {
context.put(SYMBOL_ANALYZER_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.symbolEnter = SymbolEnter.getInstance(context);
this.names = Names.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.typeChecker = TypeChecker.getInstance(context);
this.types = Types.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.typeNarrower = TypeNarrower.getInstance(context);
this.constantAnalyzer = ConstantAnalyzer.getInstance(context);
this.constantValueResolver = ConstantValueResolver.getInstance(context);
}
public BLangPackage analyze(BLangPackage pkgNode) {
this.dlog.setCurrentPackageId(pkgNode.packageID);
pkgNode.accept(this);
return pkgNode;
}
public void visit(BLangPackage pkgNode) {
if (pkgNode.completedPhases.contains(CompilerPhase.TYPE_CHECK)) {
return;
}
SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);
pkgNode.topLevelNodes.stream().filter(pkgLevelNode -> pkgLevelNode.getKind() == NodeKind.CONSTANT)
.forEach(constant -> analyzeDef((BLangNode) constant, pkgEnv));
this.constantValueResolver.resolve(pkgNode.constants, pkgNode.packageID);
for (int i = 0; i < pkgNode.topLevelNodes.size(); i++) {
TopLevelNode pkgLevelNode = pkgNode.topLevelNodes.get(i);
NodeKind kind = pkgLevelNode.getKind();
if (kind == NodeKind.CONSTANT ||
((kind == NodeKind.FUNCTION && ((BLangFunction) pkgLevelNode).flagSet.contains(Flag.LAMBDA)))) {
continue;
}
analyzeDef((BLangNode) pkgLevelNode, pkgEnv);
}
while (pkgNode.lambdaFunctions.peek() != null) {
BLangLambdaFunction lambdaFunction = pkgNode.lambdaFunctions.poll();
BLangFunction function = lambdaFunction.function;
lambdaFunction.type = function.symbol.type;
analyzeDef(lambdaFunction.function, lambdaFunction.capturedClosureEnv);
}
pkgNode.getTestablePkgs().forEach(testablePackage -> visit((BLangPackage) testablePackage));
pkgNode.completedPhases.add(CompilerPhase.TYPE_CHECK);
}
public void visit(BLangXMLNS xmlnsNode) {
xmlnsNode.type = symTable.stringType;
if (xmlnsNode.symbol == null) {
symbolEnter.defineNode(xmlnsNode, env);
}
typeChecker.checkExpr(xmlnsNode.namespaceURI, env, symTable.stringType);
}
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
analyzeNode(xmlnsStmtNode.xmlnsDecl, env);
}
public void visit(BLangFunction funcNode) {
SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
funcNode.symbol.params.forEach(param -> param.flags |= Flags.FUNCTION_FINAL);
if (!funcNode.flagSet.contains(Flag.WORKER)) {
funcNode.annAttachments.forEach(annotationAttachment -> {
if (Symbols.isFlagOn(funcNode.symbol.flags, Flags.RESOURCE)) {
annotationAttachment.attachPoints.add(AttachPoint.Point.RESOURCE);
} else if (funcNode.attachedFunction) {
annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT_METHOD);
}
annotationAttachment.attachPoints.add(AttachPoint.Point.FUNCTION);
this.analyzeDef(annotationAttachment, funcEnv);
});
validateAnnotationAttachmentCount(funcNode.annAttachments);
}
if (funcNode.returnTypeNode != null) {
funcNode.returnTypeAnnAttachments.forEach(annotationAttachment -> {
annotationAttachment.attachPoints.add(AttachPoint.Point.RETURN);
this.analyzeDef(annotationAttachment, funcEnv);
});
validateAnnotationAttachmentCount(funcNode.returnTypeAnnAttachments);
}
boolean inIsolatedFunction = funcNode.flagSet.contains(Flag.ISOLATED);
for (BLangSimpleVariable param : funcNode.requiredParams) {
symbolEnter.defineExistingVarSymbolInEnv(param.symbol, funcNode.clonedEnv);
this.analyzeDef(param, funcNode.clonedEnv);
if (param.expr != null) {
funcNode.symbol.paramDefaultValTypes.put(param.symbol.name.value, param.expr.type);
((BInvokableTypeSymbol) funcNode.type.tsymbol).paramDefaultValTypes.put(param.symbol.name.value,
param.expr.type);
}
validateIsolatedParamUsage(inIsolatedFunction, param, false);
}
BLangSimpleVariable restParam = funcNode.restParam;
if (restParam != null) {
symbolEnter.defineExistingVarSymbolInEnv(restParam.symbol, funcNode.clonedEnv);
this.analyzeDef(restParam, funcNode.clonedEnv);
validateIsolatedParamUsage(inIsolatedFunction, restParam, true);
}
validateObjectAttachedFunction(funcNode);
if (funcNode.hasBody()) {
analyzeNode(funcNode.body, funcEnv, funcNode.returnTypeNode.type, null);
}
if (funcNode.anonForkName != null) {
funcNode.symbol.enclForkName = funcNode.anonForkName;
}
funcNode.symbol.annAttachments.addAll(funcNode.annAttachments);
this.processWorkers(funcNode, funcEnv);
}
private void processWorkers(BLangInvokableNode invNode, SymbolEnv invEnv) {
if (invNode.workers.size() > 0) {
invEnv.scope.entries.putAll(invNode.body.scope.entries);
for (BLangWorker worker : invNode.workers) {
this.symbolEnter.defineNode(worker, invEnv);
}
for (BLangWorker e : invNode.workers) {
analyzeNode(e, invEnv);
}
}
}
@Override
public void visit(BLangBlockFunctionBody body) {
env = SymbolEnv.createFuncBodyEnv(body, env);
for (BLangStatement stmt : body.stmts) {
analyzeStmt(stmt, env);
}
}
@Override
public void visit(BLangExprFunctionBody body) {
env = SymbolEnv.createFuncBodyEnv(body, env);
typeChecker.checkExpr(body.expr, env, expType);
}
@Override
public void visit(BLangExternalFunctionBody body) {
for (BLangAnnotationAttachment annotationAttachment : body.annAttachments) {
annotationAttachment.attachPoints.add(AttachPoint.Point.EXTERNAL);
this.analyzeDef(annotationAttachment, env);
}
validateAnnotationAttachmentCount(body.annAttachments);
}
@Override
public void visit(BLangTypeDefinition typeDefinition) {
if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE
|| typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE
|| typeDefinition.typeNode.getKind() == NodeKind.ERROR_TYPE
|| typeDefinition.typeNode.getKind() == NodeKind.FINITE_TYPE_NODE) {
analyzeDef(typeDefinition.typeNode, env);
}
typeDefinition.annAttachments.forEach(annotationAttachment -> {
if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE) {
annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT);
}
annotationAttachment.attachPoints.add(AttachPoint.Point.TYPE);
annotationAttachment.accept(this);
});
validateAnnotationAttachmentCount(typeDefinition.annAttachments);
validateBuiltinTypeAnnotationAttachment(typeDefinition.annAttachments);
}
@Override
public void visit(BLangClassDefinition classDefinition) {
classDefinition.annAttachments.forEach(annotationAttachment -> {
annotationAttachment.attachPoints.add(AttachPoint.Point.CLASS);
annotationAttachment.accept(this);
});
validateAnnotationAttachmentCount(classDefinition.annAttachments);
analyzeClassDefinition(classDefinition);
}
private void analyzeClassDefinition(BLangClassDefinition classDefinition) {
SymbolEnv classEnv = SymbolEnv.createClassEnv(classDefinition, classDefinition.symbol.scope, env);
for (BLangSimpleVariable field : classDefinition.fields) {
analyzeDef(field, classEnv);
}
for (BLangFunction function : classDefinition.functions) {
analyzeDef(function, env);
if (function.flagSet.contains(Flag.RESOURCE) && function.flagSet.contains(Flag.NATIVE)) {
this.dlog.error(function.pos, DiagnosticCode.RESOURCE_FUNCTION_CANNOT_BE_EXTERN, function.name);
}
}
for (BAttachedFunction func : ((BObjectTypeSymbol) classDefinition.symbol).referencedFunctions) {
validateReferencedFunction(classDefinition.pos, func, env);
}
analyzerClassInitMethod(classDefinition);
}
private void analyzerClassInitMethod(BLangClassDefinition classDefinition) {
if (classDefinition.initFunction == null) {
return;
}
if (classDefinition.initFunction.flagSet.contains(Flag.PRIVATE)) {
this.dlog.error(classDefinition.initFunction.pos, DiagnosticCode.PRIVATE_OBJECT_CONSTRUCTOR,
classDefinition.symbol.name);
return;
}
if (classDefinition.initFunction.flagSet.contains(Flag.NATIVE)) {
this.dlog.error(classDefinition.initFunction.pos, DiagnosticCode.OBJECT_INIT_FUNCTION_CANNOT_BE_EXTERN,
classDefinition.symbol.name);
return;
}
analyzeDef(classDefinition.initFunction, env);
}
public void visit(BLangTypeConversionExpr conversionExpr) {
conversionExpr.annAttachments.forEach(annotationAttachment -> {
annotationAttachment.attachPoints.add(AttachPoint.Point.TYPE);
if (conversionExpr.typeNode.getKind() == NodeKind.OBJECT_TYPE) {
annotationAttachment.attachPoints.add(AttachPoint.Point.OBJECT);
}
annotationAttachment.accept(this);
});
validateAnnotationAttachmentCount(conversionExpr.annAttachments);
}
@Override
public void visit(BLangFiniteTypeNode finiteTypeNode) {
finiteTypeNode.valueSpace.forEach(val -> {
if (val.type.tag == TypeTags.NIL && NULL_LITERAL.equals(((BLangLiteral) val).originalValue)) {
dlog.error(val.pos, DiagnosticCode.INVALID_USE_OF_NULL_LITERAL);
}
});
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
SymbolEnv objectEnv = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, env);
objectTypeNode.fields.forEach(field -> {
analyzeDef(field, objectEnv);
if (field.flagSet.contains(Flag.PRIVATE)) {
this.dlog.error(field.pos, DiagnosticCode.PRIVATE_FIELD_ABSTRACT_OBJECT, field.symbol.name);
}
});
objectTypeNode.functions.forEach(func -> {
analyzeDef(func, env);
if (func.flagSet.contains(Flag.PRIVATE)) {
this.dlog.error(func.pos, DiagnosticCode.PRIVATE_FUNC_ABSTRACT_OBJECT, func.name,
objectTypeNode.symbol.name);
}
if (func.flagSet.contains(Flag.NATIVE)) {
this.dlog.error(func.pos, DiagnosticCode.EXTERN_FUNC_ABSTRACT_OBJECT, func.name,
objectTypeNode.symbol.name);
}
if (func.flagSet.contains(Flag.RESOURCE) && func.flagSet.contains(Flag.NATIVE)) {
this.dlog.error(func.pos, DiagnosticCode.RESOURCE_FUNCTION_CANNOT_BE_EXTERN, func.name);
}
});
((BObjectTypeSymbol) objectTypeNode.symbol).referencedFunctions
.forEach(func -> validateReferencedFunction(objectTypeNode.pos, func, env));
if (objectTypeNode.initFunction == null) {
return;
}
if (objectTypeNode.initFunction.flagSet.contains(Flag.PRIVATE)) {
this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.PRIVATE_OBJECT_CONSTRUCTOR,
objectTypeNode.symbol.name);
return;
}
this.dlog.error(objectTypeNode.initFunction.pos, DiagnosticCode.ABSTRACT_OBJECT_CONSTRUCTOR,
objectTypeNode.symbol.name);
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
SymbolEnv recordEnv = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, env);
recordTypeNode.fields.forEach(field -> analyzeDef(field, recordEnv));
validateOptionalNeverTypedField(recordTypeNode);
validateDefaultable(recordTypeNode);
recordTypeNode.analyzed = true;
}
@Override
public void visit(BLangErrorType errorType) {
if (errorType.detailType == null) {
return;
}
BType detailType = errorType.detailType.type;
if (!types.isValidErrorDetailType(detailType)) {
dlog.error(errorType.detailType.pos, DiagnosticCode.INVALID_ERROR_DETAIL_TYPE, errorType.detailType,
symTable.detailType);
}
}
public void visit(BLangAnnotation annotationNode) {
annotationNode.annAttachments.forEach(annotationAttachment -> {
annotationAttachment.attachPoints.add(AttachPoint.Point.ANNOTATION);
annotationAttachment.accept(this);
});
validateAnnotationAttachmentCount(annotationNode.annAttachments);
}
public void visit(BLangAnnotationAttachment annAttachmentNode) {
BSymbol symbol = this.symResolver.resolveAnnotation(annAttachmentNode.pos, env,
names.fromString(annAttachmentNode.pkgAlias.getValue()),
names.fromString(annAttachmentNode.getAnnotationName().getValue()));
if (symbol == this.symTable.notFoundSymbol) {
this.dlog.error(annAttachmentNode.pos, DiagnosticCode.UNDEFINED_ANNOTATION,
annAttachmentNode.getAnnotationName().getValue());
return;
}
BAnnotationSymbol annotationSymbol = (BAnnotationSymbol) symbol;
annAttachmentNode.annotationSymbol = annotationSymbol;
if (annotationSymbol.maskedPoints > 0 &&
!Symbols.isAttachPointPresent(annotationSymbol.maskedPoints,
AttachPoints.asMask(annAttachmentNode.attachPoints))) {
String msg = annAttachmentNode.attachPoints.stream()
.map(point -> point.name().toLowerCase())
.collect(Collectors.joining(", "));
this.dlog.error(annAttachmentNode.pos, DiagnosticCode.ANNOTATION_NOT_ALLOWED, annotationSymbol, msg);
}
validateAnnotationAttachmentExpr(annAttachmentNode, annotationSymbol);
}
public void visit(BLangSimpleVariable varNode) {
if (varNode.isDeclaredWithVar) {
validateWorkerAnnAttachments(varNode.expr);
handleDeclaredWithVar(varNode);
transferForkFlag(varNode);
return;
}
if (shouldInferErrorType(varNode)) {
validateWorkerAnnAttachments(varNode.expr);
handleDeclaredWithVar(varNode);
transferForkFlag(varNode);
if (!types.isAssignable(varNode.type, symTable.errorType)) {
dlog.error(varNode.pos, DiagnosticCode.INCOMPATIBLE_TYPES, symTable.errorType, varNode.type);
}
return;
}
int ownerSymTag = env.scope.owner.tag;
if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (ownerSymTag & SymTag.LET) == SymTag.LET) {
if (varNode.symbol == null) {
analyzeVarNode(varNode, env, AttachPoint.Point.VAR);
} else {
analyzeVarNode(varNode, env, AttachPoint.Point.PARAMETER);
}
} else if ((ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) {
analyzeVarNode(varNode, env, AttachPoint.Point.OBJECT_FIELD, AttachPoint.Point.FIELD);
} else if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD) {
analyzeVarNode(varNode, env, AttachPoint.Point.RECORD_FIELD, AttachPoint.Point.FIELD);
} else {
varNode.annAttachments.forEach(annotationAttachment -> {
if (Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER)) {
annotationAttachment.attachPoints.add(AttachPoint.Point.LISTENER);
} else if (Symbols.isFlagOn(varNode.symbol.flags, Flags.SERVICE)) {
annotationAttachment.attachPoints.add(AttachPoint.Point.SERVICE);
} else {
annotationAttachment.attachPoints.add(AttachPoint.Point.VAR);
}
annotationAttachment.accept(this);
});
}
validateAnnotationAttachmentCount(varNode.annAttachments);
validateWorkerAnnAttachments(varNode.expr);
if (isIgnoredOrEmpty(varNode)) {
varNode.symbol = new BVarSymbol(0, Names.IGNORE, env.enclPkg.packageID, symTable.anyType, env.scope.owner,
varNode.pos, VIRTUAL);
}
BType lhsType = varNode.symbol.type;
varNode.type = lhsType;
BLangExpression rhsExpr = varNode.expr;
if (rhsExpr == null) {
if (lhsType.tag == TypeTags.ARRAY && typeChecker.isArrayOpenSealedType((BArrayType) lhsType)) {
dlog.error(varNode.pos, DiagnosticCode.SEALED_ARRAY_TYPE_NOT_INITIALIZED);
}
return;
}
SymbolEnv varInitEnv = SymbolEnv.createVarInitEnv(varNode, env, varNode.symbol);
typeChecker.checkExpr(rhsExpr, varInitEnv, lhsType);
if (Symbols.isFlagOn(varNode.symbol.flags, Flags.LISTENER) &&
!types.checkListenerCompatibility(varNode.symbol.type)) {
dlog.error(varNode.pos, DiagnosticCode.INVALID_LISTENER_VARIABLE, varNode.name);
}
transferForkFlag(varNode);
}
private boolean shouldInferErrorType(BLangSimpleVariable varNode) {
return varNode.typeNode != null
&& varNode.typeNode.getKind() == NodeKind.ERROR_TYPE
&& ((BLangErrorType) varNode.typeNode).inferErrorType;
}
private void analyzeVarNode(BLangSimpleVariable varNode, SymbolEnv env, AttachPoint.Point... attachPoints) {
if (varNode.symbol == null) {
symbolEnter.defineNode(varNode, env);
}
if (varNode.typeNode != null && varNode.typeNode.getKind() == NodeKind.RECORD_TYPE &&
!((BLangRecordTypeNode) varNode.typeNode).analyzed) {
analyzeDef(varNode.typeNode, env);
}
List<AttachPoint.Point> attachPointsList = Arrays.asList(attachPoints);
for (BLangAnnotationAttachment annotationAttachment : varNode.annAttachments) {
annotationAttachment.attachPoints.addAll(attachPointsList);
annotationAttachment.accept(this);
}
}
private void transferForkFlag(BLangSimpleVariable varNode) {
if (varNode.expr != null && varNode.expr.getKind() == NodeKind.INVOCATION
&& varNode.flagSet.contains(Flag.WORKER)) {
BLangInvocation expr = (BLangInvocation) varNode.expr;
if (expr.name.value.startsWith("0") && (expr.symbol.flags & Flags.FORKED) == Flags.FORKED) {
varNode.symbol.flags |= Flags.FORKED;
}
}
}
/**
* Validate annotation attachment of the `start` action or workers.
*
* @param expr expression to be validated.
*/
private void validateWorkerAnnAttachments(BLangExpression expr) {
if (expr != null && expr instanceof BLangInvocation.BLangActionInvocation &&
((BLangInvocation.BLangActionInvocation) expr).async) {
((BLangInvocation) expr).annAttachments.forEach(annotationAttachment -> {
annotationAttachment.attachPoints.add(AttachPoint.Point.WORKER);
annotationAttachment.accept(this);
});
validateAnnotationAttachmentCount(((BLangInvocation) expr).annAttachments);
}
}
public void visit(BLangRecordVariable varNode) {
if (varNode.isDeclaredWithVar) {
handleDeclaredWithVar(varNode);
return;
}
if (varNode.type == null) {
varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);
}
if (!validateRecordVariable(varNode)) {
varNode.type = symTable.semanticError;
return;
}
symbolEnter.defineNode(varNode, env);
if (varNode.expr == null) {
return;
}
typeChecker.checkExpr(varNode.expr, env, varNode.type);
}
public void visit(BLangTupleVariable varNode) {
if (varNode.isDeclaredWithVar) {
expType = resolveTupleType(varNode);
handleDeclaredWithVar(varNode);
return;
}
if (varNode.type == null) {
varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);
}
if (!(checkTypeAndVarCountConsistency(varNode))) {
varNode.type = symTable.semanticError;
return;
}
symbolEnter.defineNode(varNode, env);
if (varNode.expr == null) {
return;
}
typeChecker.checkExpr(varNode.expr, env, varNode.type);
}
/**
 * Builds a tuple-type skeleton for a {@code var [..]} binding: nested tuple
 * variables are resolved recursively, all other members are left as
 * {@code noType} for later inference.
 */
private BType resolveTupleType(BLangTupleVariable varNode) {
    List<BType> memberTypes = new ArrayList<>(varNode.memberVariables.size());
    for (BLangVariable member : varNode.memberVariables) {
        BType memberType = member.getKind() == NodeKind.TUPLE_VARIABLE
                ? resolveTupleType((BLangTupleVariable) member)
                : symTable.noType;
        memberTypes.add(memberType);
    }
    return new BTupleType(memberTypes);
}
/**
 * Analyzes an error binding-pattern variable declaration,
 * e.g. {@code error(msg, ...rest) = expr;}.
 */
public void visit(BLangErrorVariable varNode) {
    if (varNode.isDeclaredWithVar) {
        handleDeclaredWithVar(varNode);
        return;
    }

    if (varNode.type == null) {
        varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);
    }

    // NOTE(review): this branch requires varNode.type == null but immediately
    // dereferences varNode.type (tsymbol/tag) below, which would throw an NPE if
    // ever entered — it looks unreachable/dead; confirm the intended condition.
    if (!varNode.reasonVarPrefixAvailable && varNode.type == null) {
        BErrorType errorType = new BErrorType(varNode.type.tsymbol, null);
        if (varNode.type.tag == TypeTags.UNION) {
            // Narrow a union down to its error members to derive the detail type.
            Set<BType> members = types.expandAndGetMemberTypesRecursive(varNode.type);
            List<BErrorType> errorMembers = members.stream()
                    .filter(m -> m.tag == TypeTags.ERROR)
                    .map(m -> (BErrorType) m)
                    .collect(Collectors.toList());
            if (errorMembers.isEmpty()) {
                dlog.error(varNode.pos, DiagnosticCode.INVALID_ERROR_MATCH_PATTERN);
                return;
            } else if (errorMembers.size() == 1) {
                errorType.detailType = errorMembers.get(0).detailType;
            } else {
                // Multiple error members: fall back to the generic detail type.
                errorType.detailType = symTable.detailType;
            }
            varNode.type = errorType;
        } else if (varNode.type.tag == TypeTags.ERROR) {
            errorType.detailType = ((BErrorType) varNode.type).detailType;
        }
    }

    // Checks the error type against the binding pattern and types its parts.
    if (!validateErrorVariable(varNode)) {
        varNode.type = symTable.semanticError;
        return;
    }

    symbolEnter.defineNode(varNode, env);

    if (varNode.expr == null) {
        return;
    }

    typeChecker.checkExpr(varNode.expr, env, varNode.type);
}
/**
 * Infers the type of a {@code var}-declared variable (simple, tuple, record or
 * error binding pattern) from its initializer expression, then validates the
 * pattern and defines the bound symbol(s).
 */
private void handleDeclaredWithVar(BLangVariable variable) {
    BLangExpression varRefExpr = variable.expr;
    BType rhsType;
    if (varRefExpr == null) {
        // `var x;` without an initializer cannot be inferred.
        rhsType = symTable.semanticError;
        variable.type = symTable.semanticError;
        dlog.error(variable.pos, DiagnosticCode.VARIABLE_DECL_WITH_VAR_WITHOUT_INITIALIZER);
    } else {
        rhsType = typeChecker.checkExpr(varRefExpr, this.env, expType);
    }

    switch (variable.getKind()) {
        case VARIABLE:
        case LET_VARIABLE:
            if (!validateObjectTypeInitInvocation(varRefExpr)) {
                rhsType = symTable.semanticError;
            }

            // Listener declarations must be initialized with a listener-compatible value.
            if (variable.flagSet.contains(Flag.LISTENER) && !types.checkListenerCompatibility(rhsType)) {
                dlog.error(varRefExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, LISTENER_TYPE_NAME, rhsType);
                return;
            }

            BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable;

            Name varName = names.fromIdNode(simpleVariable.name);
            if (varName == Names.IGNORE) {
                // `var _ = ...` introduces no new variable, which `var` requires.
                dlog.error(simpleVariable.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);
                return;
            }

            simpleVariable.type = rhsType;

            int ownerSymTag = env.scope.owner.tag;
            if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE || (ownerSymTag & SymTag.LET) == SymTag.LET) {
                // Locals inside functions / let expressions may not be defined yet.
                if (simpleVariable.symbol == null) {
                    symbolEnter.defineNode(simpleVariable, env);
                }
            }

            simpleVariable.symbol.type = rhsType;
            break;
        case TUPLE_VARIABLE:
            if (varRefExpr == null) {
                return;
            }

            // A list-constructor RHS gives no concrete member types to infer from.
            if (variable.isDeclaredWithVar && variable.expr.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR) {
                List<String> bindingPatternVars = new ArrayList<>();
                List<BLangVariable> members = ((BLangTupleVariable) variable).memberVariables;
                for (BLangVariable var : members) {
                    bindingPatternVars.add(((BLangSimpleVariable) var).name.value);
                }
                dlog.error(varRefExpr.pos, DiagnosticCode.CANNOT_INFER_TYPES_FOR_TUPLE_BINDING, bindingPatternVars);
                variable.type = symTable.semanticError;
                return;
            }

            if (TypeTags.TUPLE != rhsType.tag) {
                dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN_INFERENCE, rhsType);
                variable.type = symTable.semanticError;
                return;
            }

            BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
            tupleVariable.type = rhsType;

            if (!(checkTypeAndVarCountConsistency(tupleVariable))) {
                tupleVariable.type = symTable.semanticError;
                return;
            }

            symbolEnter.defineNode(tupleVariable, env);
            break;
        case RECORD_VARIABLE:
            if (varRefExpr == null) {
                return;
            }

            // Record patterns may destructure records, maps and json values.
            if (TypeTags.RECORD != rhsType.tag && TypeTags.MAP != rhsType.tag && TypeTags.JSON != rhsType.tag) {
                dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_RECORD_VAR, rhsType);
                variable.type = symTable.semanticError;
            }

            BLangRecordVariable recordVariable = (BLangRecordVariable) variable;
            recordVariable.type = rhsType;

            if (!validateRecordVariable(recordVariable)) {
                recordVariable.type = symTable.semanticError;
            }
            break;
        case ERROR_VARIABLE:
            if (varRefExpr == null) {
                return;
            }

            if (TypeTags.ERROR != rhsType.tag) {
                dlog.error(variable.expr.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR, rhsType);
                variable.type = symTable.semanticError;
                return;
            }

            BLangErrorVariable errorVariable = (BLangErrorVariable) variable;

            if (errorVariable.typeNode != null) {
                symResolver.resolveTypeNode(errorVariable.typeNode, env);
            }

            errorVariable.type = rhsType;

            if (!validateErrorVariable(errorVariable)) {
                errorVariable.type = symTable.semanticError;
                return;
            }

            symbolEnter.defineNode(errorVariable, env);
            break;
    }
}
/**
 * Types and defines a foreach binding pattern (simple, tuple, record or error)
 * against the iteration member type {@code rhsType}, inside {@code blockEnv}.
 * Foreach-bound variables are implicitly final.
 */
void handleDeclaredVarInForeach(BLangVariable variable, BType rhsType, SymbolEnv blockEnv) {
    switch (variable.getKind()) {
        case VARIABLE:
            BLangSimpleVariable simpleVariable = (BLangSimpleVariable) variable;
            Name varName = names.fromIdNode(simpleVariable.name);
            if (varName == Names.IGNORE) {
                // `_` is not a valid foreach binding variable.
                dlog.error(simpleVariable.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);
                return;
            }

            simpleVariable.type = rhsType;

            int ownerSymTag = blockEnv.scope.owner.tag;
            if ((ownerSymTag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {
                if (simpleVariable.symbol == null) {
                    symbolEnter.defineNode(simpleVariable, blockEnv);
                }
            }

            recursivelySetFinalFlag(simpleVariable);
            break;
        case TUPLE_VARIABLE:
            BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;

            if (TypeTags.TUPLE != rhsType.tag && TypeTags.UNION != rhsType.tag) {
                dlog.error(variable.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN_INFERENCE, rhsType);
                // Still define the inner variables (with error type) so later
                // references resolve instead of cascading "undefined symbol" errors.
                recursivelyDefineVariables(tupleVariable, blockEnv);
                return;
            }

            tupleVariable.type = rhsType;

            if (rhsType.tag == TypeTags.TUPLE && !(checkTypeAndVarCountConsistency(tupleVariable,
                    (BTupleType) tupleVariable.type, blockEnv))) {
                recursivelyDefineVariables(tupleVariable, blockEnv);
                return;
            }

            // For a union, let the consistency check resolve the tuple type itself.
            if (rhsType.tag == TypeTags.UNION && !(checkTypeAndVarCountConsistency(tupleVariable, null,
                    blockEnv))) {
                recursivelyDefineVariables(tupleVariable, blockEnv);
                return;
            }

            symbolEnter.defineNode(tupleVariable, blockEnv);
            recursivelySetFinalFlag(tupleVariable);
            break;
        case RECORD_VARIABLE:
            BLangRecordVariable recordVariable = (BLangRecordVariable) variable;
            recordVariable.type = rhsType;
            validateRecordVariable(recordVariable, blockEnv);
            recursivelySetFinalFlag(recordVariable);
            break;
        case ERROR_VARIABLE:
            BLangErrorVariable errorVariable = (BLangErrorVariable) variable;

            if (TypeTags.ERROR != rhsType.tag) {
                dlog.error(variable.pos, DiagnosticCode.INVALID_TYPE_DEFINITION_FOR_ERROR_VAR, rhsType);
                recursivelyDefineVariables(errorVariable, blockEnv);
                return;
            }

            errorVariable.type = rhsType;
            validateErrorVariable(errorVariable);
            recursivelySetFinalFlag(errorVariable);
            break;
    }
}
/**
 * Defines every simple variable inside a (possibly nested) binding pattern with
 * the semantic-error type, so later references still resolve after a failed
 * pattern analysis.
 */
private void recursivelyDefineVariables(BLangVariable variable, SymbolEnv blockEnv) {
    NodeKind kind = variable.getKind();
    if (kind == NodeKind.VARIABLE) {
        Name name = names.fromIdNode(((BLangSimpleVariable) variable).name);
        if (name == Names.IGNORE) {
            return;
        }
        variable.type = symTable.semanticError;
        symbolEnter.defineVarSymbol(variable.pos, variable.flagSet, variable.type, name, blockEnv,
                variable.internal);
    } else if (kind == NodeKind.TUPLE_VARIABLE) {
        for (BLangVariable member : ((BLangTupleVariable) variable).memberVariables) {
            recursivelyDefineVariables(member, blockEnv);
        }
    } else if (kind == NodeKind.RECORD_VARIABLE) {
        for (BLangRecordVariableKeyValue keyValue : ((BLangRecordVariable) variable).variableList) {
            recursivelyDefineVariables(keyValue.valueBindingPattern, blockEnv);
        }
    }
}
/**
 * Marks every symbol bound by the given (possibly nested) binding pattern as
 * final. Null sub-patterns (e.g. a missing rest binding) are ignored.
 */
private void recursivelySetFinalFlag(BLangVariable variable) {
    if (variable == null) {
        return;
    }

    NodeKind kind = variable.getKind();
    if (kind == NodeKind.VARIABLE) {
        if (variable.symbol != null) {
            variable.symbol.flags |= Flags.FINAL;
        }
    } else if (kind == NodeKind.TUPLE_VARIABLE) {
        BLangTupleVariable tupleVar = (BLangTupleVariable) variable;
        for (BLangVariable member : tupleVar.memberVariables) {
            recursivelySetFinalFlag(member);
        }
        recursivelySetFinalFlag(tupleVar.restVariable);
    } else if (kind == NodeKind.RECORD_VARIABLE) {
        BLangRecordVariable recordVar = (BLangRecordVariable) variable;
        for (BLangRecordVariableKeyValue keyValue : recordVar.variableList) {
            recursivelySetFinalFlag(keyValue.valueBindingPattern);
        }
        recursivelySetFinalFlag((BLangVariable) recordVar.restParam);
    } else if (kind == NodeKind.ERROR_VARIABLE) {
        BLangErrorVariable errorVar = (BLangErrorVariable) variable;
        recursivelySetFinalFlag(errorVar.message);
        recursivelySetFinalFlag(errorVar.restDetail);
        for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : errorVar.detail) {
            recursivelySetFinalFlag(detailEntry.valueBindingPattern);
        }
    }
}
// Convenience overload: resolve the tuple type from varNode.type in the current env.
private boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode) {
    return checkTypeAndVarCountConsistency(varNode, null, env);
}
/**
 * Checks a tuple binding pattern against its declared/inferred type: resolves
 * the effective tuple type (when {@code tupleTypeNode} is null), verifies the
 * member/rest variable counts match, and assigns and analyzes member types.
 *
 * @return true when the pattern is consistent with the type, false (with a
 *         diagnostic logged) otherwise.
 */
private boolean checkTypeAndVarCountConsistency(BLangTupleVariable varNode, BTupleType tupleTypeNode,
                                                SymbolEnv env) {
    if (tupleTypeNode == null) {
    /*
      This switch block will resolve the tuple type of the tuple variable.
      For example consider the following - [int, string]|[boolean, float] [a, b] = foo();
      Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:
      Type of 'a' will be (int | boolean) while the type of 'b' will be (string | float).
      Consider anydata (a, b) = foo();
      Here, the type of 'a'and type of 'b' will be both anydata.
     */
        switch (varNode.type.tag) {
            case TypeTags.UNION:
                Set<BType> unionType = types.expandAndGetMemberTypesRecursive(varNode.type);
                // Keep only members a tuple pattern of this arity could destructure.
                List<BType> possibleTypes = unionType.stream()
                        .filter(type -> {
                            if (TypeTags.TUPLE == type.tag &&
                                    (varNode.memberVariables.size() == ((BTupleType) type).tupleTypes.size())) {
                                return true;
                            }
                            return TypeTags.ANY == type.tag || TypeTags.ANYDATA == type.tag;
                        })
                        .collect(Collectors.toList());
                if (possibleTypes.isEmpty()) {
                    dlog.error(varNode.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN_DECL, varNode.type);
                    return false;
                }

                if (possibleTypes.size() > 1) {
                    // Merge per-position member types across all candidates.
                    List<BType> memberTupleTypes = new ArrayList<>();
                    for (int i = 0; i < varNode.memberVariables.size(); i++) {
                        LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
                        for (BType possibleType : possibleTypes) {
                            if (possibleType.tag == TypeTags.TUPLE) {
                                memberTypes.add(((BTupleType) possibleType).tupleTypes.get(i));
                            } else {
                                // NOTE(review): this adds to the outer memberTupleTypes
                                // (not the per-position memberTypes set), once per
                                // non-tuple candidate — verify this is intentional.
                                memberTupleTypes.add(varNode.type);
                            }
                        }

                        if (memberTypes.size() > 1) {
                            memberTupleTypes.add(BUnionType.create(null, memberTypes));
                        } else {
                            memberTupleTypes.addAll(memberTypes);
                        }
                    }
                    tupleTypeNode = new BTupleType(memberTupleTypes);
                    break;
                }

                if (possibleTypes.get(0).tag == TypeTags.TUPLE) {
                    tupleTypeNode = (BTupleType) possibleTypes.get(0);
                    break;
                }

                // Single any/anydata candidate: every member gets that type.
                List<BType> memberTypes = new ArrayList<>();
                for (int i = 0; i < varNode.memberVariables.size(); i++) {
                    memberTypes.add(possibleTypes.get(0));
                }
                tupleTypeNode = new BTupleType(memberTypes);
                break;
            case TypeTags.ANY:
            case TypeTags.ANYDATA:
                List<BType> memberTupleTypes = new ArrayList<>();
                for (int i = 0; i < varNode.memberVariables.size(); i++) {
                    memberTupleTypes.add(varNode.type);
                }
                tupleTypeNode = new BTupleType(memberTupleTypes);
                if (varNode.restVariable != null) {
                    tupleTypeNode.restType = varNode.type;
                }
                break;
            case TypeTags.TUPLE:
                tupleTypeNode = (BTupleType) varNode.type;
                break;
            default:
                dlog.error(varNode.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN_DECL, varNode.type);
                return false;
        }
    }

    // Arity check: member count must match and a rest binding must be mirrored
    // by a rest type (and vice versa).
    if (tupleTypeNode.tupleTypes.size() != varNode.memberVariables.size()
            || (tupleTypeNode.restType == null && varNode.restVariable != null)
            || (tupleTypeNode.restType != null && varNode.restVariable == null)) {
        dlog.error(varNode.pos, DiagnosticCode.INVALID_TUPLE_BINDING_PATTERN);
        return false;
    }

    int ignoredCount = 0;
    List<BLangVariable> memberVariables = new ArrayList<>(varNode.memberVariables);
    if (varNode.restVariable != null) {
        memberVariables.add(varNode.restVariable);
    }

    for (int i = 0; i < memberVariables.size(); i++) {
        BLangVariable var = memberVariables.get(i);
        // Positions past the fixed members belong to the rest binding (an array
        // of the rest type).
        BType type = (i <= tupleTypeNode.tupleTypes.size() - 1) ? tupleTypeNode.tupleTypes.get(i) :
                new BArrayType(tupleTypeNode.restType);
        if (var.getKind() == NodeKind.VARIABLE) {
            BLangSimpleVariable simpleVar = (BLangSimpleVariable) var;
            Name varName = names.fromIdNode(simpleVar.name);
            if (varName == Names.IGNORE) {
                ignoredCount++;
                simpleVar.type = symTable.anyType;
                types.checkType(varNode.pos, type, simpleVar.type,
                        DiagnosticCode.INCOMPATIBLE_TYPES);
                continue;
            }
        }
        var.type = type;
        analyzeNode(var, env);
    }

    // A pattern made up entirely of `_` (with no rest) binds nothing.
    if (!varNode.memberVariables.isEmpty() && ignoredCount == varNode.memberVariables.size()
            && varNode.restVariable == null) {
        dlog.error(varNode.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);
        return false;
    }
    return true;
}
// Convenience overload using the analyzer's current environment.
private boolean validateRecordVariable(BLangRecordVariable recordVar) {
    return validateRecordVariable(recordVar, env);
}
/**
 * Checks a record binding pattern against its type: resolves the effective
 * record type (unions, maps, any/anydata are normalized into a record view),
 * assigns each bound field variable its type, and analyzes the sub-patterns.
 *
 * @return true when the pattern is valid for the type, false otherwise.
 */
private boolean validateRecordVariable(BLangRecordVariable recordVar, SymbolEnv env) {
    BRecordType recordVarType;
    /*
      This switch block will resolve the record type of the record variable.
      For example consider the following -
      type Foo record {int a, boolean b};
      type Bar record {string a, float b};
      Foo|Bar {a, b} = foo();
      Since the varNode type is a union, the types of 'a' and 'b' will be resolved as follows:
      Type of 'a' will be a union of the types of field 'a' in both Foo and Bar.
      i.e. type of 'a' is (int | string) and type of 'b' is (boolean | float).

      Consider anydata {a, b} = foo();
      Here, the type of 'a'and type of 'b' will be both anydata.
     */
    switch (recordVar.type.tag) {
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) recordVar.type;
            Set<BType> bTypes = types.expandAndGetMemberTypesRecursive(unionType);
            // Keep only members that can supply every key the pattern binds.
            List<BType> possibleTypes = bTypes.stream()
                    .filter(rec -> doesRecordContainKeys(rec, recordVar.variableList, recordVar.restParam != null))
                    .collect(Collectors.toList());

            if (possibleTypes.isEmpty()) {
                dlog.error(recordVar.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, recordVar.type);
                return false;
            }

            if (possibleTypes.size() > 1) {
                // Synthesize an anonymous record whose field types union the candidates'.
                BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0,
                        names.fromString(ANONYMOUS_RECORD_NAME),
                        env.enclPkg.symbol.pkgID, null,
                        env.scope.owner, recordVar.pos, SOURCE);
                // NOTE(review): this aliases the shared symTable.recordType and then
                // mutates its fields/restFieldType/tsymbol below, corrupting the global
                // type for all later uses; a fresh `new BRecordType(recordSymbol)`
                // looks intended — confirm.
                recordVarType = (BRecordType) symTable.recordType;

                LinkedHashMap<String, BField> fields =
                        populateAndGetPossibleFieldsForRecVar(recordVar, possibleTypes, recordSymbol);

                if (recordVar.restParam != null) {
                    // Rest field type unions the candidates' rest/constraint types.
                    LinkedHashSet<BType> memberTypes = possibleTypes.stream()
                            .map(possibleType -> {
                                if (possibleType.tag == TypeTags.RECORD) {
                                    return ((BRecordType) possibleType).restFieldType;
                                } else if (possibleType.tag == TypeTags.MAP) {
                                    return ((BMapType) possibleType).constraint;
                                } else {
                                    return possibleType;
                                }
                            })
                            .collect(Collectors.toCollection(LinkedHashSet::new));
                    recordVarType.restFieldType = memberTypes.size() > 1 ?
                            BUnionType.create(null, memberTypes) :
                            memberTypes.iterator().next();
                }
                recordVarType.tsymbol = recordSymbol;
                recordVarType.fields = fields;
                recordSymbol.type = recordVarType;
                break;
            }

            if (possibleTypes.get(0).tag == TypeTags.RECORD) {
                recordVarType = (BRecordType) possibleTypes.get(0);
                break;
            }

            if (possibleTypes.get(0).tag == TypeTags.MAP) {
                recordVarType = createSameTypedFieldsRecordType(recordVar,
                        ((BMapType) possibleTypes.get(0)).constraint);
                break;
            }

            recordVarType = createSameTypedFieldsRecordType(recordVar, possibleTypes.get(0));
            break;
        case TypeTags.RECORD:
            recordVarType = (BRecordType) recordVar.type;
            break;
        case TypeTags.MAP:
            // map<T>: every bound field gets type T.
            recordVarType = createSameTypedFieldsRecordType(recordVar, ((BMapType) recordVar.type).constraint);
            break;
        case TypeTags.ANY:
        case TypeTags.ANYDATA:
            recordVarType = createSameTypedFieldsRecordType(recordVar, recordVar.type);
            break;
        default:
            dlog.error(recordVar.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, recordVar.type);
            return false;
    }

    LinkedHashMap<String, BField> recordVarTypeFields = recordVarType.fields;

    boolean validRecord = true;
    int ignoredCount = 0;
    for (BLangRecordVariableKeyValue variable : recordVar.variableList) {
        // `_` is not a valid field key in a record binding pattern.
        if (names.fromIdNode(variable.getKey()) == Names.IGNORE) {
            dlog.error(recordVar.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);
            continue;
        }

        BLangVariable value = variable.getValue();
        if (value.getKind() == NodeKind.VARIABLE) {
            BLangSimpleVariable simpleVar = (BLangSimpleVariable) value;
            Name varName = names.fromIdNode(simpleVar.name);
            if (varName == Names.IGNORE) {
                // `{a: _}` discards the field; still verify the field type is valid.
                ignoredCount++;
                simpleVar.type = symTable.anyType;
                if (!recordVarTypeFields.containsKey(variable.getKey().getValue())) {
                    continue;
                }
                types.checkType(variable.valueBindingPattern.pos,
                        recordVarTypeFields.get((variable.getKey().getValue())).type, simpleVar.type,
                        DiagnosticCode.INCOMPATIBLE_TYPES);
                continue;
            }
        }

        if (!recordVarTypeFields.containsKey(variable.getKey().getValue())) {
            if (recordVarType.sealed) {
                // Closed record: unknown keys are an error.
                validRecord = false;
                dlog.error(recordVar.pos, DiagnosticCode.INVALID_FIELD_IN_RECORD_BINDING_PATTERN,
                        variable.getKey().getValue(), recordVar.type);
            } else {
                // Open record: the key may be absent at runtime, so the binding is
                // optional (rest-type | nil) unless rest is already any/anydata.
                BType restType;
                if (recordVarType.restFieldType.tag == TypeTags.ANYDATA ||
                        recordVarType.restFieldType.tag == TypeTags.ANY) {
                    restType = recordVarType.restFieldType;
                } else {
                    restType = BUnionType.create(null, recordVarType.restFieldType, symTable.nilType);
                }
                value.type = restType;
                analyzeNode(value, env);
            }
            continue;
        }

        value.type = recordVarTypeFields.get((variable.getKey().getValue())).type;
        analyzeNode(value, env);
    }

    // A pattern made up entirely of discarded fields (with no rest) binds nothing.
    if (!recordVar.variableList.isEmpty() && ignoredCount == recordVar.variableList.size()
            && recordVar.restParam == null) {
        dlog.error(recordVar.pos, DiagnosticCode.NO_NEW_VARIABLES_VAR_ASSIGNMENT);
        return false;
    }

    if (recordVar.restParam != null) {
        ((BLangVariable) recordVar.restParam).type = getRestParamType(recordVarType);
        symbolEnter.defineNode((BLangNode) recordVar.restParam, env);
    }

    return validRecord;
}
/**
 * Checks an error binding pattern against its type: resolves the effective
 * error (and detail) type from a union if needed, types the message/cause
 * bindings, and delegates detail validation.
 *
 * @return true when the pattern is valid for the type, false otherwise.
 */
private boolean validateErrorVariable(BLangErrorVariable errorVariable) {
    BErrorType errorType;
    switch (errorVariable.type.tag) {
        case TypeTags.UNION:
            BUnionType unionType = ((BUnionType) errorVariable.type);
            List<BErrorType> possibleTypes = unionType.getMemberTypes().stream()
                    .filter(type -> TypeTags.ERROR == type.tag)
                    .map(BErrorType.class::cast)
                    .collect(Collectors.toList());
            if (possibleTypes.isEmpty()) {
                dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type);
                return false;
            }
            if (possibleTypes.size() > 1) {
                // Several error members: the detail type is their union.
                LinkedHashSet<BType> detailType = new LinkedHashSet<>();
                for (BErrorType possibleErrType : possibleTypes) {
                    detailType.add(possibleErrType.detailType);
                }
                BType errorDetailType = detailType.size() > 1
                        ? BUnionType.create(null, detailType)
                        : detailType.iterator().next();
                errorType = new BErrorType(null, errorDetailType);
            } else {
                errorType = possibleTypes.get(0);
            }
            break;
        case TypeTags.ERROR:
            errorType = (BErrorType) errorVariable.type;
            break;
        default:
            dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type);
            return false;
    }
    errorVariable.type = errorType;

    if (!errorVariable.isInMatchStmt) {
        // In declarations, message is always string and cause is error?.
        errorVariable.message.type = symTable.stringType;
        errorVariable.message.accept(this);

        if (errorVariable.cause != null) {
            errorVariable.cause.type = symTable.errorOrNilType;
            errorVariable.cause.accept(this);
        }
    }

    if (errorVariable.detail == null || (errorVariable.detail.isEmpty()
            && !isRestDetailBindingAvailable(errorVariable))) {
        // No detail bindings: only the message pattern syntax needs validation.
        return validateErrorMessageMatchPatternSyntax(errorVariable);
    }

    if (errorType.detailType.getKind() == TypeKind.RECORD || errorType.detailType.getKind() == TypeKind.MAP) {
        return validateErrorVariable(errorVariable, errorType);
    } else if (errorType.detailType.getKind() == TypeKind.UNION) {
        // Detail is a union: fall back to the generic error/detail type and retry.
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR,
                env.enclPkg.packageID, symTable.errorType,
                env.scope.owner, errorVariable.pos, SOURCE);
        errorVariable.type = new BErrorType(errorTypeSymbol, symTable.detailType);
        return validateErrorVariable(errorVariable);
    }

    if (isRestDetailBindingAvailable(errorVariable)) {
        errorVariable.restDetail.type = symTable.detailType;
        errorVariable.restDetail.accept(this);
    }
    return true;
}
/**
 * Validates the detail bindings of an error pattern against the (record-shaped)
 * detail type of {@code errorType}, typing each bound entry and the rest binding.
 *
 * @return true when every detail binding is valid, false otherwise.
 */
private boolean validateErrorVariable(BLangErrorVariable errorVariable, BErrorType errorType) {
    errorVariable.message.type = symTable.stringType;
    errorVariable.message.accept(this);

    BRecordType recordType = getDetailAsARecordType(errorType);
    LinkedHashMap<String, BField> detailFields = recordType.fields;
    Set<String> matchedDetailFields = new HashSet<>();
    for (BLangErrorVariable.BLangErrorDetailEntry errorDetailEntry : errorVariable.detail) {
        String entryName = errorDetailEntry.key.getValue();
        matchedDetailFields.add(entryName);
        BField entryField = detailFields.get(entryName);

        BLangVariable boundVar = errorDetailEntry.valueBindingPattern;
        if (entryField != null) {
            // Optional detail fields may be absent, so the binding is (T | nil).
            if ((entryField.symbol.flags & Flags.OPTIONAL) == Flags.OPTIONAL) {
                boundVar.type = BUnionType.create(null, entryField.type, symTable.nilType);
            } else {
                boundVar.type = entryField.type;
            }
        } else {
            if (recordType.sealed) {
                // Closed detail record: unknown keys are invalid.
                dlog.error(errorVariable.pos, DiagnosticCode.INVALID_ERROR_BINDING_PATTERN, errorVariable.type);
                boundVar.type = symTable.semanticError;
                return false;
            } else {
                boundVar.type = BUnionType.create(null, recordType.restFieldType, symTable.nilType);
            }
        }

        boolean isIgnoredVar = boundVar.getKind() == NodeKind.VARIABLE
                && ((BLangSimpleVariable) boundVar).name.value.equals(Names.IGNORE.value);
        if (!isIgnoredVar) {
            boundVar.accept(this);
        }
    }

    if (isRestDetailBindingAvailable(errorVariable)) {
        // Rest binding is a map constrained by the unmatched detail field types.
        BTypeSymbol typeSymbol = createTypeSymbol(SymTag.TYPE);
        BType constraint = getRestMapConstraintType(detailFields, matchedDetailFields, recordType);
        BMapType restType = new BMapType(TypeTags.MAP, constraint, typeSymbol);
        typeSymbol.type = restType;
        errorVariable.restDetail.type = restType;
        errorVariable.restDetail.accept(this);
    }
    return true;
}
/**
 * Returns the error's detail type viewed as a record: a record detail type is
 * returned as-is; a map detail type is wrapped in an open record whose rest
 * field type is the map's constraint.
 */
private BRecordType getDetailAsARecordType(BErrorType errorType) {
    BType detail = errorType.detailType;
    if (detail.getKind() == TypeKind.RECORD) {
        return (BRecordType) detail;
    }
    BRecordType recordEquivalent = new BRecordType(null);
    recordEquivalent.sealed = false;
    recordEquivalent.restFieldType = ((BMapType) detail).constraint;
    return recordEquivalent;
}
/**
 * Computes the map constraint for an error pattern's rest binding: the record's
 * rest-field type (for open records) plus the type of every detail field that
 * was not explicitly matched. A single-member union collapses to that member.
 */
private BType getRestMapConstraintType(Map<String, BField> errorDetailFields, Set<String> matchedDetailFields,
                                       BRecordType recordType) {
    BUnionType candidateUnion = BUnionType.create(null);
    if (!recordType.sealed) {
        candidateUnion.add(recordType.restFieldType);
    }
    for (Map.Entry<String, BField> fieldEntry : errorDetailFields.entrySet()) {
        if (matchedDetailFields.contains(fieldEntry.getKey())) {
            continue;
        }
        BType fieldType = fieldEntry.getValue().getType();
        // Skip types already covered by the union to keep it minimal.
        if (!types.isAssignable(fieldType, candidateUnion)) {
            candidateUnion.add(fieldType);
        }
    }
    Set<BType> members = candidateUnion.getMemberTypes();
    return members.size() == 1 ? members.iterator().next() : candidateUnion;
}
/**
 * For error match patterns without a `var` prefix or a const reason, checks the
 * plain identifier in the message position: if it does not resolve to a
 * constant the binding pattern is invalid; if it does, constant reason matching
 * is reported as unsupported. Returns true when no such problem applies.
 */
private boolean validateErrorMessageMatchPatternSyntax(BLangErrorVariable errorVariable) {
    if (errorVariable.isInMatchStmt
            && !errorVariable.reasonVarPrefixAvailable
            && errorVariable.reasonMatchConst == null
            && isReasonSpecified(errorVariable)) {

        BSymbol reasonConst = symResolver.lookupSymbolInMainSpace(this.env.enclEnv,
                names.fromString(errorVariable.message.name.value));
        if ((reasonConst.tag & SymTag.CONSTANT) != SymTag.CONSTANT) {
            dlog.error(errorVariable.message.pos, DiagnosticCode.INVALID_ERROR_REASON_BINDING_PATTERN,
                    errorVariable.message.name);
        } else {
            dlog.error(errorVariable.message.pos, DiagnosticCode.UNSUPPORTED_ERROR_REASON_CONST_MATCH);
        }
        return false;
    }
    return true;
}
// True when the pattern names the error message, i.e. it is neither `_` nor empty.
private boolean isReasonSpecified(BLangErrorVariable errorVariable) {
    return !isIgnoredOrEmpty(errorVariable.message);
}
// True for the `_` wildcard binding and for a missing (empty) variable name.
private boolean isIgnoredOrEmpty(BLangSimpleVariable varNode) {
    String name = varNode.name.value;
    return name.equals(Names.IGNORE.value) || name.isEmpty();
}
// A rest-detail binding counts only when present and not the `_` wildcard.
private boolean isRestDetailBindingAvailable(BLangErrorVariable errorVariable) {
    return errorVariable.restDetail != null
            && !Names.IGNORE.value.equals(errorVariable.restDetail.name.value);
}
// Creates a fresh public type symbol of the given SymTag kind, owned by the
// current scope's owner; its type is assigned later by the caller.
private BTypeSymbol createTypeSymbol(int type) {
    return new BTypeSymbol(type, Flags.PUBLIC, Names.EMPTY, env.enclPkg.packageID,
            null, env.scope.owner, symTable.builtinPos, VIRTUAL);
}
/**
 * This method will resolve field types based on a list of possible types.
 * When a record variable has multiple possible assignable types, each field will be a union of the relevant
 * possible types field type.
 *
 * @param recordVar record variable whose fields types are to be resolved
 * @param possibleTypes list of possible types
 * @param recordSymbol symbol of the record type to be used in creating fields
 * @return the list of fields
 */
private LinkedHashMap<String, BField> populateAndGetPossibleFieldsForRecVar(BLangRecordVariable recordVar,
                                                                            List<BType> possibleTypes,
                                                                            BRecordTypeSymbol recordSymbol) {
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    for (BLangRecordVariableKeyValue bLangRecordVariableKeyValue : recordVar.variableList) {
        String fieldName = bLangRecordVariableKeyValue.key.value;
        LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
        for (BType possibleType : possibleTypes) {
            if (possibleType.tag == TypeTags.RECORD) {
                BRecordType possibleRecordType = (BRecordType) possibleType;

                if (possibleRecordType.fields.containsKey(fieldName)) {
                    BField field = possibleRecordType.fields.get(fieldName);
                    // Optional fields may be absent, so nil joins the union.
                    if (Symbols.isOptional(field.symbol)) {
                        memberTypes.add(symTable.nilType);
                    }
                    memberTypes.add(field.type);
                } else {
                    // Key only reachable via the rest field; it may also be absent.
                    memberTypes.add(possibleRecordType.restFieldType);
                    memberTypes.add(symTable.nilType);
                }
                continue;
            }

            if (possibleType.tag == TypeTags.MAP) {
                BMapType possibleMapType = (BMapType) possibleType;
                memberTypes.add(possibleMapType.constraint);
                continue;
            }
            // any/anydata candidate contributes itself.
            memberTypes.add(possibleType);
        }

        BType fieldType = memberTypes.size() > 1 ?
                BUnionType.create(null, memberTypes) : memberTypes.iterator().next();
        BField field = new BField(names.fromString(fieldName), recordVar.pos,
                new BVarSymbol(0, names.fromString(fieldName), env.enclPkg.symbol.pkgID,
                        fieldType, recordSymbol, recordVar.pos, SOURCE));
        fields.put(field.name.value, field);
    }
    return fields;
} | class defined for an object-constructor-expression (OCE). This will be analyzed when
continue;
}
analyzeDef((BLangNode) pkgLevelNode, pkgEnv);
}
while (pkgNode.lambdaFunctions.peek() != null) {
BLangLambdaFunction lambdaFunction = pkgNode.lambdaFunctions.poll();
BLangFunction function = lambdaFunction.function;
lambdaFunction.type = function.symbol.type;
analyzeDef(lambdaFunction.function, lambdaFunction.capturedClosureEnv);
} |
Other places where this is used validate either the package names or the module names. So this change will prevent `.` from being used as a package name or a module name. | private static boolean validateDotSeparatedIdentifiers(String identifiers) {
Matcher m = separatedIdentifierPattern.matcher(identifiers);
Matcher mm = onlyDotsPattern.matcher(identifiers);
return m.matches() && !mm.matches();
} | Matcher mm = onlyDotsPattern.matcher(identifiers); | private static boolean validateDotSeparatedIdentifiers(String identifiers) {
Matcher m = separatedIdentifierPattern.matcher(identifiers);
Matcher mm = onlyDotsPattern.matcher(identifiers);
return m.matches() && !mm.matches();
} | class ProjectUtils {
// System property key for the user's home directory.
private static final String USER_HOME = "user.home";
// Dot-separated identifier: letters, digits, underscores and dots only.
private static final Pattern separatedIdentifierPattern = Pattern.compile("^[a-zA-Z0-9_.]*$");
// Names consisting solely of dots ("." / ".." / ...); rejected outright.
private static final Pattern onlyDotsPattern = Pattern.compile("^[.]+$");
// Organization names: letters, digits and underscores only (no dots).
private static final Pattern orgNamePattern = Pattern.compile("^[a-zA-Z0-9_]*$");
/**
 * Validates the org-name.
 *
 * @param orgName The org-name
 * @return True if valid org-name or package name, else false.
 */
public static boolean validateOrgName(String orgName) {
    // Organization names allow only [a-zA-Z0-9_]; the empty string also matches.
    return orgNamePattern.matcher(orgName).matches();
}
/**
 * Validates the package name.
 *
 * @param packageName The package name.
 * @return True if valid package name, else false.
 */
public static boolean validatePackageName(String packageName) {
    // All three rules must hold: identifier grammar, underscore placement,
    // and no leading digit.
    if (!validateDotSeparatedIdentifiers(packageName)) {
        return false;
    }
    if (!validateUnderscoresOfName(packageName)) {
        return false;
    }
    return validateInitialNumericsOfName(packageName);
}
/**
 * Validates the package name.
 *
 * @param orgName The organization name.
 * @param packageName The package name.
 * @return True if valid package name, else false.
 */
public static boolean validatePackageName(String orgName, String packageName) {
    // Lang-lib packages are exempt from the underscore-placement rules.
    if (isLangLibPackage(PackageOrg.from(orgName), PackageName.from(packageName))) {
        return validateDotSeparatedIdentifiers(packageName)
                && validateInitialNumericsOfName(packageName);
    }

    return validateDotSeparatedIdentifiers(packageName)
            && validateUnderscoresOfName(packageName)
            && validateInitialNumericsOfName(packageName);
}
/**
 * Validates the module name.
 *
 * @param moduleName The module name.
 * @return True if valid module name, else false.
 */
public static boolean validateModuleName(String moduleName) {
    // Module names share the dot-separated identifier grammar of package names.
    return validateDotSeparatedIdentifiers(moduleName);
}
/**
 * Validates the organization, package or module name length.
 * Maximum length is 256 characters.
 *
 * @param name name.
 * @return true if valid name length, else false.
 */
public static boolean validateNameLength(String name) {
    final int maxNameLength = 256;
    return name.length() <= maxNameLength;
}
/**
 * Checks the organization, package or module name has initial, trailing or consecutive underscores.
 *
 * @param name name.
 * @return true if name does not have initial, trailing or consecutive underscores, else false.
 */
public static boolean validateUnderscoresOfName(String name) {
    boolean hasMisplacedUnderscore =
            name.startsWith("_") || name.endsWith("_") || name.contains("__");
    return !hasMisplacedUnderscore;
}
/**
 * Checks the organization, package or module name has initial numeric characters.
 *
 * @param name name.
 * @return true if name does not have initial numeric characters, else false.
 */
public static boolean validateInitialNumericsOfName(String name) {
    // Same regex as before, so only an ASCII digit in the first position fails.
    boolean startsWithDigit = name.matches("[0-9].*");
    return !startsWithDigit;
}
/**
 * Remove last character of the given string.
 * Note: an empty input throws StringIndexOutOfBoundsException, as before.
 *
 * @param aString given string
 * @return string removed last character
 */
public static String removeLastChar(String aString) {
    int newLength = aString.length() - 1;
    return aString.substring(0, newLength);
}
/**
 * Remove first character of the given string.
 * Note: an empty input throws StringIndexOutOfBoundsException, as before.
 *
 * @param aString given string
 * @return string removed first character
 */
public static String removeFirstChar(String aString) {
    final int firstCharWidth = 1;
    return aString.substring(firstCharWidth);
}
/**
 * Returns the error message for the first package-name rule the given name
 * violates, in the same order the validators are applied.
 */
public static String getPackageValidationError(String packageName) {
    if (!validateDotSeparatedIdentifiers(packageName)) {
        return "Package name can only contain alphanumerics and underscores.";
    }
    if (!validateInitialNumericsOfName(packageName)) {
        return "Package name cannot have initial numeric characters.";
    }
    // Remaining failure mode: misplaced underscores.
    return getValidateUnderscoreError(packageName, "Package");
}
/**
 * Get specific error message when organization, package or module name has initial, trailing or
 * consecutive underscores.
 *
 * @param name name.
 * @param packageOrModule package or module.
 * @return specific error message.
 */
public static String getValidateUnderscoreError(String name, String packageOrModule) {
    String reason;
    if (name.startsWith("_")) {
        reason = " name cannot have initial underscore characters.";
    } else if (name.endsWith("_")) {
        reason = " name cannot have trailing underscore characters.";
    } else {
        reason = " name cannot have consecutive underscore characters.";
    }
    return packageOrModule + reason;
}
/**
 * Find the project root by walking up from the given path towards the
 * filesystem root.
 *
 * @param filePath project path
 * @return the nearest ancestor directory containing Ballerina.toml, or
 *         {@code null} when none exists
 */
public static Path findProjectRoot(Path filePath) {
    // Iterative equivalent of the recursive walk: normalize, test, step up.
    Path current = filePath;
    while (current != null) {
        current = current.toAbsolutePath().normalize();
        if (current.toFile().isDirectory() && Files.exists(current.resolve(BALLERINA_TOML))) {
            return current;
        }
        current = current.getParent();
    }
    return null;
}
/**
 * Checks if the path is a Ballerina project.
 *
 * @param sourceRoot source root of the project.
 * @return true if the directory is a project repo, false if its the home repo
 */
public static boolean isBallerinaProject(Path sourceRoot) {
    if (!Files.isDirectory(sourceRoot)) {
        return false;
    }
    Path ballerinaToml = sourceRoot.resolve(BALLERINA_TOML);
    return Files.exists(ballerinaToml) && Files.isRegularFile(ballerinaToml);
}
/**
 * Guesses an organization name from the current system user name, falling back to
 * {@code "my_org"} when no user name is available.
 *
 * @return a lower-cased organization name with invalid characters replaced by underscores
 */
public static String guessOrgName() {
    String orgName = System.getProperty(USER_NAME);
    if (orgName == null) {
        return "my_org";
    }
    if (!validateOrgName(orgName)) {
        // Replace every character that is not valid in an org name.
        orgName = orgName.replaceAll("[^a-zA-Z0-9_]", "_");
    }
    return orgName.toLowerCase(Locale.getDefault());
}
/**
 * Derives a valid package name from an arbitrary directory/package name.
 * Invalid characters become underscores, an initial digit is prefixed with
 * "lib"/"app" depending on the template, and underscore placement is normalized.
 *
 * @param packageName raw package name
 * @param template    template name ("lib" gets a "lib" prefix, anything else "app")
 * @return a sanitized package name
 */
public static String guessPkgName(String packageName, String template) {
    // Replace characters that are not allowed in a package name. (Applying this
    // unconditionally is equivalent to the previous validate-then-replace: the
    // replacement is a no-op on names that only fail the underscore/numeric checks.)
    packageName = packageName.replaceAll("[^a-zA-Z0-9_.]", "_");
    // A package name must not start with a digit: prefix based on the template.
    if (packageName.matches("[0-9].*")) {
        packageName = (template.equalsIgnoreCase("lib") ? "lib" : "app") + packageName;
    }
    // Drop ALL leading underscores (the old code removed only one, so "__x" stayed invalid).
    packageName = packageName.replaceAll("^_+", "");
    // Collapse ANY run of underscores (a single pass of "__"->"_" left "a___b" as "a__b").
    packageName = packageName.replaceAll("_{2,}", "_");
    // Drop ALL trailing underscores.
    packageName = packageName.replaceAll("_+$", "");
    return packageName;
}
/**
 * Guesses a module name with a valid pattern by replacing disallowed characters
 * with underscores.
 *
 * @param moduleName raw module name
 * @return the sanitized module name
 */
public static String guessModuleName(String moduleName) {
    // Equivalent to validate-then-replace: the replacement only touches characters
    // outside [a-zA-Z0-9_.], so it is the identity on names that pass validation.
    return moduleName.replaceAll("[^a-zA-Z0-9_.]", "_");
}
/**
 * Returns the default organization, derived from the system user name.
 *
 * @return the default {@link PackageOrg}
 */
public static PackageOrg defaultOrg() {
    String orgName = guessOrgName();
    return PackageOrg.from(orgName);
}
/**
 * Returns the default package name, derived from the project directory name.
 *
 * @param projectPath project root path
 * @return the default {@link PackageName}
 */
public static PackageName defaultName(Path projectPath) {
    String dirName = Optional.ofNullable(projectPath.getFileName())
            .map(Path::toString)
            .orElse("");
    return PackageName.from(guessPkgName(dirName, "app"));
}
/**
 * Returns the default package version ({@code ProjectConstants.INTERNAL_VERSION}).
 *
 * @return the default {@link PackageVersion}
 */
public static PackageVersion defaultVersion() {
        return PackageVersion.from(ProjectConstants.INTERNAL_VERSION);
    }
/**
 * Builds the bala file name for a package manifest, using the "any" platform.
 *
 * @param pkgDesc the package manifest
 * @return the bala file name
 */
public static String getBalaName(PackageManifest pkgDesc) {
    String org = pkgDesc.org().toString();
    String name = pkgDesc.name().toString();
    String version = pkgDesc.version().toString();
    return getBalaName(org, name, version, null);
}
/**
 * Builds a bala file name of the form {@code org-pkg-platform-version.bala}.
 *
 * @param org      organization name
 * @param pkgName  package name
 * @param version  package version
 * @param platform target platform; {@code null}/empty defaults to "any"
 * @return the bala file name
 */
public static String getBalaName(String org, String pkgName, String version, String platform) {
    String platformSegment = (platform == null || platform.isEmpty()) ? "any" : platform;
    return String.join("-", org, pkgName, platformSegment, version) + BLANG_COMPILED_PKG_BINARY_EXT;
}
/**
 * Returns the relative path of an extracted bala beginning from the package org.
 *
 * @param org      package org
 * @param pkgName  package name
 * @param version  package version
 * @param platform platform; {@code null}/empty converts to "any"
 * @return relative bala path {@code org/pkgName/version/platform}
 */
public static Path getRelativeBalaPath(String org, String pkgName, String version, String platform) {
    String platformDir = (platform == null || platform.isEmpty()) ? "any" : platform;
    return Paths.get(org, pkgName, version, platformDir);
}
/**
 * Builds the jar file name {@code org-name-version.jar} for a package.
 *
 * @param pkg the package
 * @return the jar file name
 */
public static String getJarFileName(Package pkg) {
    String org = pkg.packageOrg().toString();
    String name = pkg.packageName().toString();
    return org + "-" + name + "-" + pkg.packageVersion() + BLANG_COMPILED_JAR_EXT;
}
/**
 * Builds the executable jar name ({@code <package-name>.jar}) for a package.
 *
 * @param pkg the package
 * @return the executable file name
 */
public static String getExecutableName(Package pkg) {
    String baseName = pkg.packageName().toString();
    return baseName + BLANG_COMPILED_JAR_EXT;
}
/**
 * Extracts the organization from a bala file name of the form
 * {@code <org>-<pkg>-<platform>-<version>.bala}.
 *
 * @param balaName bala file name
 * @return the organization segment
 */
public static String getOrgFromBalaName(String balaName) {
    // Limit 2: we only need the segment before the first dash.
    return balaName.split("-", 2)[0];
}
/**
 * Extracts the package name from a bala file name of the form
 * {@code <org>-<pkg>-<platform>-<version>.bala}.
 *
 * @param balaName bala file name
 * @return the package-name segment
 */
public static String getPackageNameFromBalaName(String balaName) {
    // Limit 3: only the first two dashes matter for index 1.
    return balaName.split("-", 3)[1];
}
/**
 * Extracts the version from a bala file name of the form
 * {@code <org>-<pkg>-<platform>-<version>.bala}.
 *
 * @param balaName bala file name
 * @return the version segment (extension stripped)
 */
public static String getVersionFromBalaName(String balaName) {
    String versionWithExtension = balaName.split("-")[3];
    int extensionStart = versionWithExtension.indexOf(BLANG_COMPILED_PKG_BINARY_EXT);
    return versionWithExtension.substring(0, extensionStart);
}
private static final HashSet<String> excludeExtensions = new HashSet<>(Lists.of("DSA", "SF"));
/**
 * Returns the Ballerina distribution home, read from the {@code ballerina.home} system property.
 *
 * @return the Ballerina home path
 */
public static Path getBalHomePath() {
    String ballerinaHome = System.getProperty(BALLERINA_HOME);
    return Paths.get(ballerinaHome);
}
/**
 * Returns the path of the Ballerina runtime jar ({@code bre/lib/ballerina-rt-<version>.jar})
 * inside the distribution home.
 *
 * @return the runtime jar path
 */
public static Path getBallerinaRTJarPath() {
    String runtimeJarName = "ballerina-rt-" + RepoUtils.getBallerinaPackVersion() + BLANG_COMPILED_JAR_EXT;
    return getBalHomePath().resolve("bre").resolve("lib").resolve(runtimeJarName);
}
/**
 * Returns the test-only jar dependencies (testerina runtime/core, lang, jacoco, asm,
 * diff-utils) resolved against the distribution's {@code bre/lib} directory.
 *
 * @return the test-scope {@link JarLibrary} list, in a fixed order
 */
public static List<JarLibrary> testDependencies() {
    String testPkgName = "ballerina/test";
    String ballerinaVersion = RepoUtils.getBallerinaPackVersion();
    Path homeLibPath = getBalHomePath().resolve(BALLERINA_HOME_BRE).resolve(LIB_DIR);
    // Same jars and order as before, expressed as one list instead of nine locals.
    String[] jarNames = {
            TEST_RUNTIME_JAR_PREFIX + ballerinaVersion + BLANG_COMPILED_JAR_EXT,
            TEST_CORE_JAR_PREFIX + ballerinaVersion + BLANG_COMPILED_JAR_EXT,
            "ballerina-lang-" + ballerinaVersion + BLANG_COMPILED_JAR_EXT,
            JACOCO_CORE_JAR,
            JACOCO_REPORT_JAR,
            ASM_JAR,
            ASM_TREE_JAR,
            ASM_COMMONS_JAR,
            DIFF_UTILS_JAR
    };
    List<JarLibrary> dependencies = new ArrayList<>(jarNames.length);
    for (String jarName : jarNames) {
        dependencies.add(new JarLibrary(homeLibPath.resolve(jarName),
                PlatformLibraryScope.TEST_ONLY, testPkgName));
    }
    return dependencies;
}
/**
 * Creates an empty observability-symbols jar (manifest only) as a temp file.
 *
 * @param packageName package name used as the temp-file prefix
 * @return path of the created jar
 * @throws IOException if the temp file or the jar stream cannot be created
 */
public static Path generateObservabilitySymbolsJar(String packageName) throws IOException {
    Path jarPath = Files.createTempFile(packageName + "-", "-observability-symbols.jar");
    Manifest manifest = new Manifest();
    manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
    // try-with-resources: the original leaked the stream when the constructor chain threw
    // between FileOutputStream creation and close().
    try (JarOutputStream jarOutputStream = new JarOutputStream(new BufferedOutputStream(
            new FileOutputStream(jarPath.toFile())), manifest)) {
        // Intentionally empty: the jar only needs the manifest entry.
    }
    return jarPath;
}
/**
 * Assembles an executable "fat" jar at {@code targetPath}: the Ballerina runtime jar is
 * copied in first, then the supplied manifest, then every entry of every compiled
 * package jar (duplicates/excluded entries are skipped in copyEntry).
 *
 * @param manifest               manifest written as META-INF/MANIFEST.MF
 * @param compiledPackageJarList compiled package jars whose entries are merged in
 * @param targetPath             output path of the executable jar
 * @throws IOException if writing the jar fails
 */
public static void assembleExecutableJar(Manifest manifest,
                                         List<CompiledJarFile> compiledPackageJarList,
                                         Path targetPath) throws IOException {
        HashSet<String> copiedEntries = new HashSet<>();
        try (ZipArchiveOutputStream outStream = new ZipArchiveOutputStream(
                new BufferedOutputStream(new FileOutputStream(targetPath.toString())))) {
            // Runtime entries go in first; copiedEntries records what was written so later
            // package-jar entries with the same name are dropped instead of duplicated.
            copyRuntimeJar(outStream, getBallerinaRTJarPath(), copiedEntries);
            JarArchiveEntry e = new JarArchiveEntry(JarFile.MANIFEST_NAME);
            outStream.putArchiveEntry(e);
            manifest.write(new BufferedOutputStream(outStream));
            outStream.closeArchiveEntry();
            for (CompiledJarFile compiledJarFile : compiledPackageJarList) {
                for (Map.Entry<String, byte[]> keyVal : compiledJarFile.getJarEntries().entrySet()) {
                    copyEntry(copiedEntries, outStream, keyVal);
                }
            }
        }
    }
/**
 * Writes a single in-memory jar entry to the fat jar, unless the entry was already
 * copied or carries an excluded signing extension (*.DSA / *.SF).
 *
 * NOTE(review): copiedEntries is read but not updated here, so identical entries coming
 * from two different package jars would both be written — confirm this is intended.
 *
 * @param copiedEntries entry names already written to the output stream
 * @param outStream     fat-jar output stream
 * @param keyVal        entry name -> entry bytes
 * @throws IOException if writing the entry fails
 */
private static void copyEntry(HashSet<String> copiedEntries,
                              ZipArchiveOutputStream outStream,
                              Map.Entry<String, byte[]> keyVal) throws IOException {
        String entryName = keyVal.getKey();
        if (!isCopiedOrExcludedEntry(entryName, copiedEntries)) {
            byte[] entryContent = keyVal.getValue();
            JarArchiveEntry entry = new JarArchiveEntry(entryName);
            outStream.putArchiveEntry(entry);
            outStream.write(entryContent);
            outStream.closeArchiveEntry();
        }
    }
/**
 * Copies the Ballerina runtime jar into the executable fat jar, skipping its manifest
 * and signing artifacts, and merging META-INF/services provider files instead of
 * copying them verbatim.
 *
 * @param outStream          fat-jar output stream
 * @param ballerinaRTJarPath Ballerina runtime jar path
 * @param copiedEntries      entry names already written; updated with entries copied here
 * @throws IOException if jar file copying fails
 */
public static void copyRuntimeJar(ZipArchiveOutputStream outStream,
                                  Path ballerinaRTJarPath,
                                  HashSet<String> copiedEntries) throws IOException {
        // META-INF/services file name -> accumulated provider lines (merged, not overwritten).
        HashMap<String, StringBuilder> services = new HashMap<>();
        ZipFile zipFile = new ZipFile(ballerinaRTJarPath.toString());
        // Decides, per entry, whether copyRawEntries may copy it verbatim.
        ZipArchiveEntryPredicate predicate = entry -> {
            String entryName = entry.getName();
            if (entryName.equals("META-INF/MANIFEST.MF")) {
                // The fat jar gets its own manifest; drop the runtime jar's one.
                return false;
            }
            if (entryName.startsWith("META-INF/services")) {
                // Accumulate service declarations; they are written out merged below.
                StringBuilder s = services.get(entryName);
                if (s == null) {
                    s = new StringBuilder();
                    services.put(entryName, s);
                }
                char c = '\n';
                int len;
                try (BufferedInputStream inStream = new BufferedInputStream(zipFile.getInputStream(entry))) {
                    while ((len = inStream.read()) != -1) {
                        c = (char) len;
                        s.append(c);
                    }
                } catch (IOException e) {
                    throw new ProjectException(e);
                }
                if (c != '\n') {
                    // Ensure the accumulated file ends with a newline before more content is appended.
                    s.append('\n');
                }
                return false;
            }
            if (isCopiedOrExcludedEntry(entryName, copiedEntries)) {
                // Already written, or a signing artifact (*.DSA / *.SF) that must be dropped.
                return false;
            }
            copiedEntries.add(entryName);
            return true;
        };
        zipFile.copyRawEntries(outStream, predicate);
        zipFile.close();
        // Write out the merged META-INF/services files collected above.
        for (Map.Entry<String, StringBuilder> entry : services.entrySet()) {
            String s = entry.getKey();
            StringBuilder service = entry.getValue();
            JarArchiveEntry e = new JarArchiveEntry(s);
            outStream.putArchiveEntry(e);
            outStream.write(service.toString().getBytes(StandardCharsets.UTF_8));
            outStream.closeArchiveEntry();
        }
    }
/**
 * Checks whether a jar entry was already copied or has an excluded signing extension.
 *
 * @param entryName     jar entry name
 * @param copiedEntries entry names already written
 * @return {@code true} when the entry must be skipped
 */
private static boolean isCopiedOrExcludedEntry(String entryName, HashSet<String> copiedEntries) {
    if (copiedEntries.contains(entryName)) {
        return true;
    }
    // Text after the last dot; the whole name when there is no dot (lastIndexOf == -1).
    String extension = entryName.substring(entryName.lastIndexOf(".") + 1);
    return excludeExtensions.contains(extension);
}
/**
 * Constructs the thin-jar name of the provided module: the lone document name for
 * anonymous (single-file) packages, the package name for the default module, and the
 * module-name part otherwise.
 *
 * @param module module instance
 * @return the name of the thin jar (without extension)
 */
public static String getJarFileName(Module module) {
    if (module.packageInstance().manifest().org().anonymous()) {
        // Single-file packages are named after their only document.
        DocumentId documentId = module.documentIds().iterator().next();
        String documentName = module.document(documentId).name();
        return getFileNameWithoutExtension(documentName);
    }
    ModuleName moduleName = module.moduleName();
    return moduleName.isDefaultModuleName()
            ? moduleName.packageName().toString()
            : moduleName.moduleNamePart();
}
/**
 * Constructs the thin-jar base name {@code org-moduleName-version}.
 *
 * @param org        organization
 * @param moduleName module name
 * @param version    package version
 * @return the thin-jar base name
 */
public static String getThinJarFileName(PackageOrg org, String moduleName, PackageVersion version) {
    return String.join("-", org.value(), moduleName, version.value().toString());
}
/**
 * Resolves the home repository path: the HOME_REPO environment variable when set,
 * otherwise {@code <user.home>/<default dirname>}.
 *
 * @return the absolute home repository path
 * @throws BLangCompilerException when the user home cannot be determined or the
 *         resolved path exists but is not a directory
 */
public static Path createAndGetHomeReposPath() {
    String homeRepoDir = System.getenv(ProjectConstants.HOME_REPO_ENV_KEY);
    Path homeRepoPath;
    if (homeRepoDir != null && !homeRepoDir.isEmpty()) {
        homeRepoPath = Paths.get(homeRepoDir);
    } else {
        String userHomeDir = System.getProperty(USER_HOME);
        if (userHomeDir == null || userHomeDir.isEmpty()) {
            throw new BLangCompilerException("Error creating home repository: unable to get user home directory");
        }
        homeRepoPath = Paths.get(userHomeDir, ProjectConstants.HOME_REPO_DEFAULT_DIRNAME);
    }
    homeRepoPath = homeRepoPath.toAbsolutePath();
    if (Files.exists(homeRepoPath) && !Files.isDirectory(homeRepoPath, LinkOption.NOFOLLOW_LINKS)) {
        throw new BLangCompilerException("Home repository is not a directory: " + homeRepoPath.toString());
    }
    return homeRepoPath;
}
/**
 * Checks whether a Ballerina module directory exists under the project's modules root.
 *
 * @param projectPath project path
 * @param moduleName  module name
 * @return {@code true} when the module directory exists
 */
public static boolean isModuleExist(Path projectPath, String moduleName) {
    return Files.exists(projectPath.resolve(ProjectConstants.MODULES_ROOT).resolve(moduleName));
}
/**
 * Creates an HTTP {@link Proxy} from the proxy section of Settings.toml, registering a
 * default {@link Authenticator} when proxy credentials are configured.
 *
 * @param proxy toml model proxy; may be {@code null}
 * @return the configured proxy, or {@code null} when no usable host/port is set
 */
public static Proxy initializeProxy(io.ballerina.projects.internal.model.Proxy proxy) {
    if (proxy != null && !"".equals(proxy.host()) && proxy.port() > 0) {
        InetSocketAddress proxyInet = new InetSocketAddress(proxy.host(), proxy.port());
        // Register the authenticator only when BOTH username and password are present.
        // (The previous condition required an EMPTY password, so configured credentials
        // were never actually applied.)
        if (!"".equals(proxy.username()) && !"".equals(proxy.password())) {
            Authenticator authenticator = new URIDryConverter.RemoteAuthenticator();
            Authenticator.setDefault(authenticator);
        }
        return new Proxy(Proxy.Type.HTTP, proxyInet);
    }
    return null;
}
/**
 * Reads the Ballerina Central access token for the CLI. The environment variable
 * always takes precedence over Settings.toml.
 *
 * @param settings parsed Settings.toml model
 * @return the access token, or an empty string when none is configured
 */
public static String getAccessTokenOfCLI(Settings settings) {
    String envToken = System.getenv(ProjectConstants.BALLERINA_CENTRAL_ACCESS_TOKEN);
    if (envToken != null) {
        return envToken;
    }
    return settings.getCentral() == null ? "" : settings.getCentral().getAccessToken();
}
/**
 * Fails with a {@link ProjectException} when {@code path} is not writable.
 *
 * @param path path to check
 */
public static void checkWritePermission(Path path) {
    File file = path.toFile();
    if (!file.canWrite()) {
        throw new ProjectException("'" + path.normalize() + "' does not have write permissions");
    }
}
/**
 * Fails with a {@link ProjectException} when {@code path} is not readable.
 *
 * @param path path to check
 */
public static void checkReadPermission(Path path) {
    File file = path.toFile();
    if (!file.canRead()) {
        throw new ProjectException("'" + path.normalize() + "' does not have read permissions");
    }
}
/**
 * Fails with a {@link ProjectException} when {@code path} is not executable.
 *
 * @param path path to check
 */
public static void checkExecutePermission(Path path) {
    File file = path.toFile();
    if (!file.canExecute()) {
        throw new ProjectException("'" + path.normalize() + "' does not have execute permissions");
    }
}
/**
* Get `Dependencies.toml` content as a string.
*
* @param pkgGraphDependencies direct dependencies of the package dependency graph
* @return Dependencies.toml` content
*/
public static String getDependenciesTomlContent(Collection<ResolvedPackageDependency> pkgGraphDependencies) {
String comment = "
"
"
StringBuilder content = new StringBuilder(comment);
content.append("[ballerina]\n");
content.append("version = \"").append(RepoUtils.getBallerinaShortVersion()).append("\"\n");
content.append("dependencies-toml-version = \"").append(ProjectConstants.DEPENDENCIES_TOML_VERSION)
.append("\"\n\n");
pkgGraphDependencies.forEach(graphDependency -> {
PackageDescriptor descriptor = graphDependency.packageInstance().descriptor();
addDependencyContent(content, descriptor.org().value(), descriptor.name().value(),
descriptor.version().value().toString(), null, Collections.emptyList(),
Collections.emptyList());
content.append("\n");
});
return String.valueOf(content);
}
/**
* Get `Dependencies.toml` content as a string.
*
* @param pkgDependencies direct dependencies of the package dependency graph
* @return Dependencies.toml` content
*/
public static String getDependenciesTomlContent(List<Dependency> pkgDependencies) {
String comment = "
+ "
+ "
StringBuilder content = new StringBuilder(comment);
content.append("[ballerina]\n");
content.append("dependencies-toml-version = \"").append(ProjectConstants.DEPENDENCIES_TOML_VERSION)
.append("\"\n\n");
pkgDependencies.forEach(dependency -> {
addDependencyContent(content, dependency.getOrg(), dependency.getName(), dependency.getVersion(),
getDependencyScope(dependency.getScope()), dependency.getDependencies(),
dependency.getModules());
content.append("\n");
});
return String.valueOf(content);
}
/**
 * Appends one {@code [[package]]} TOML table for a dependency, including its optional
 * scope, transitive-dependency list and module list. Output formatting is exact
 * (tabs and trailing commas) and must stay byte-stable for Dependencies.toml diffs.
 *
 * @param content      builder receiving the TOML text
 * @param org          dependency organization
 * @param name         dependency package name
 * @param version      dependency version
 * @param scope        dependency scope, or {@code null} to omit the scope line
 * @param dependencies transitive dependencies rendered into the "dependencies" array
 * @param modules      modules rendered into the "modules" array
 */
private static void addDependencyContent(StringBuilder content, String org, String name, String version,
                                         String scope, List<Dependency> dependencies,
                                         List<Dependency.Module> modules) {
        content.append("[[package]]\n");
        content.append("org = \"").append(org).append("\"\n");
        content.append("name = \"").append(name).append("\"\n");
        content.append("version = \"").append(version).append("\"\n");
        if (scope != null) {
            content.append("scope = \"").append(scope).append("\"\n");
        }
        if (!dependencies.isEmpty()) {
            // count tracks position so the final element has no trailing comma.
            var count = 1;
            content.append("dependencies = [\n");
            for (Dependency transDependency : dependencies) {
                content.append("\t{");
                content.append("org = \"").append(transDependency.getOrg()).append("\", ");
                content.append("name = \"").append(transDependency.getName()).append("\"");
                content.append("}");
                if (count != dependencies.size()) {
                    content.append(",\n");
                } else {
                    content.append("\n");
                }
                count++;
            }
            content.append("]\n");
        }
        if (!modules.isEmpty()) {
            // Same comma handling as the dependencies array above.
            var count = 1;
            content.append("modules = [\n");
            for (Dependency.Module module : modules) {
                content.append("\t{");
                content.append("org = \"").append(module.org()).append("\", ");
                content.append("packageName = \"").append(module.packageName()).append("\", ");
                content.append("moduleName = \"").append(module.moduleName()).append("\"");
                content.append("}");
                if (count != modules.size()) {
                    content.append(",\n");
                } else {
                    content.append("\n");
                }
                count++;
            }
            content.append("]\n");
        }
    }
/**
 * Maps a dependency scope to its Dependencies.toml string representation.
 *
 * @param scope package dependency scope
 * @return {@code "testOnly"} for test-only scope, {@code null} otherwise (scope line omitted)
 */
private static String getDependencyScope(PackageDependencyScope scope) {
    return scope == PackageDependencyScope.TEST_ONLY ? "testOnly" : null;
}
/**
 * Lists the candidate package names for a dotted module name: each dot-prefix of the
 * module name, shortest first. Built-in modules map to exactly one package.
 *
 * @param packageOrg organization of the module
 * @param moduleName dotted module name
 * @return candidate package names, shortest prefix first
 */
public static List<PackageName> getPossiblePackageNames(PackageOrg packageOrg, String moduleName) {
    if (isBuiltInPackage(packageOrg, moduleName)) {
        // Built-in modules live in a package of exactly the same name.
        return Collections.singletonList(PackageName.from(moduleName));
    }
    var pkgNameBuilder = new StringJoiner(".");
    String[] modNameParts = moduleName.split("\\.");
    List<PackageName> possiblePkgNames = new ArrayList<>(modNameParts.length);
    for (String part : modNameParts) {
        pkgNameBuilder.add(part);
        possiblePkgNames.add(PackageName.from(pkgNameBuilder.toString()));
    }
    return possiblePkgNames;
}
/**
 * Checks whether the org/module pair names a built-in package (lang libs,
 * ballerinai, jballerina.java interop, or the test module).
 *
 * @param org        package organization
 * @param moduleName module name
 * @return {@code true} for built-in packages
 */
public static boolean isBuiltInPackage(PackageOrg org, String moduleName) {
    if (org.value().equals(Names.BALLERINA_INTERNAL_ORG.getValue())) {
        return true;
    }
    if (!org.isBallerinaOrg()) {
        return false;
    }
    return moduleName.startsWith("lang.")
            || moduleName.equals(Names.JAVA.getValue())
            || moduleName.equals(Names.TEST.getValue());
}
/**
 * Checks whether the org/package pair names a lang-lib package (ballerina/lang.* or
 * the jballerina.java interop package).
 *
 * @param org         package organization
 * @param packageName package name
 * @return {@code true} for lang-lib packages
 */
public static boolean isLangLibPackage(PackageOrg org, PackageName packageName) {
    if (!org.isBallerinaOrg()) {
        return false;
    }
    String name = packageName.value();
    return name.startsWith("lang.") || name.equals(Names.JAVA.getValue());
}
/**
 * Extracts a .bala archive into the provided destination directory, clearing any
 * previously extracted content first.
 *
 * @param balaFilePath     .bala file path
 * @param balaFileDestPath directory into which the .bala should be extracted
 * @throws IOException if extraction fails or an entry would escape the destination
 */
public static void extractBala(Path balaFilePath, Path balaFileDestPath) throws IOException {
    // Clear a stale destination before extracting. (The previous check tested
    // Files.isDirectory on the .bala FILE, which is never a directory, so old
    // content was never removed.)
    if (Files.exists(balaFileDestPath) && Files.isDirectory(balaFileDestPath)) {
        try (java.util.stream.Stream<Path> walk = Files.walk(balaFileDestPath)) {
            walk.sorted(java.util.Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
        }
    }
    Files.createDirectories(balaFileDestPath);
    Path normalizedDest = balaFileDestPath.toAbsolutePath().normalize();
    byte[] buffer = new byte[1024 * 4];
    try (ZipInputStream zipInputStream = new ZipInputStream(new FileInputStream(balaFilePath.toString()))) {
        for (ZipEntry zipEntry = zipInputStream.getNextEntry(); zipEntry != null;
                zipEntry = zipInputStream.getNextEntry()) {
            String fileName = zipEntry.getName();
            Path outputPath = normalizedDest.resolve(fileName).normalize();
            // Zip-slip guard: an entry such as "../../x" must not escape the destination.
            if (!outputPath.startsWith(normalizedDest)) {
                throw new IOException("Invalid entry outside of the destination directory: " + fileName);
            }
            if (zipEntry.isDirectory()) {
                Files.createDirectories(outputPath);
                continue;
            }
            Files.createDirectories(outputPath.getParent());
            try (FileOutputStream fileOutputStream = new FileOutputStream(outputPath.toFile())) {
                int len;
                while ((len = zipInputStream.read(buffer)) > 0) {
                    fileOutputStream.write(buffer, 0, len);
                }
            }
        }
    }
}
/**
 * Deletes the given directory along with all files and sub-directories.
 *
 * @param directoryPath directory (or file) to delete
 * @return {@code true} when everything was deleted successfully
 */
public static boolean deleteDirectory(Path directoryPath) {
    File directory = directoryPath.toFile();
    // Recurse into children first (depth-first), bailing out on the first failure.
    File[] children = directory.isDirectory() ? directory.listFiles() : null;
    if (children != null) {
        for (File child : children) {
            if (!deleteDirectory(child.toPath())) {
                return false;
            }
        }
    }
    return directory.delete();
}
/**
 * Reads the 'build' metadata file from the given path.
 *
 * @param buildJsonPath build file path
 * @return the parsed {@link BuildJson}, or {@code null} for an empty file
 * @throws JsonSyntaxException on malformed JSON
 * @throws IOException if the file cannot be read
 */
public static BuildJson readBuildJson(Path buildJsonPath) throws JsonSyntaxException, IOException {
    try (BufferedReader reader = Files.newBufferedReader(buildJsonPath)) {
        return new Gson().fromJson(reader, BuildJson.class);
    }
}
/**
 * Checks whether the project sources changed since the last recorded build, i.e.
 * whether a rebuild is required. Returns {@code true} (rebuild) when build metadata
 * is missing or unreadable.
 *
 * @param project project instance
 * @return {@code true} when the project must be considered updated
 */
public static boolean isProjectUpdated(Project project) {
        // Expected cache location of the observability-symbols jar for the current package.
        Path observeJarCachePath = project.targetDir()
                .resolve(CACHES_DIR_NAME)
                .resolve(project.currentPackage().packageOrg().value())
                .resolve(project.currentPackage().packageName().value())
                .resolve(project.currentPackage().packageVersion().value().toString())
                .resolve("observe")
                .resolve(project.currentPackage().packageOrg().value() + "-"
                                 + project.currentPackage().packageName().value()
                                 + "-observability-symbols.jar");
        // Observability requested but its cached jar is missing -> rebuild.
        if (project.buildOptions().observabilityIncluded() &&
                !observeJarCachePath.toFile().exists()) {
            return true;
        }
        Path buildFile = project.sourceRoot().resolve(TARGET_DIR_NAME).resolve(BUILD_FILE);
        if (buildFile.toFile().exists()) {
            try {
                BuildJson buildJson = readBuildJson(buildFile);
                long lastProjectUpdatedTime = FileUtils.lastModifiedTimeOfBalProject(project.sourceRoot());
                PackageName packageName = project.currentPackage().packageName();
                // Missing or incomplete build metadata -> treat as updated (forces a rebuild).
                if (buildJson == null
                        || buildJson.getLastModifiedTime() == null
                        || buildJson.getLastModifiedTime().entrySet().isEmpty()
                        || buildJson.getLastModifiedTime().get(packageName.value()) == null) {
                    return true;
                }
                long defaultModuleLastModifiedTime = buildJson.getLastModifiedTime()
                        .get(packageName.value());
                // Updated iff any project file is newer than the recorded timestamp.
                return lastProjectUpdatedTime > defaultModuleLastModifiedTime;
            } catch (IOException e) {
                // Corrupt 'build' file: discard it (best effort) and force a rebuild.
                try {
                    Files.deleteIfExists(buildFile);
                } catch (IOException ex) {
                    // Deliberately ignored: failing to delete only leaves the stale file behind.
                }
                return true;
            }
        }
        // No 'build' file recorded yet -> updated by definition.
        return true;
    }
/**
 * Builds a fresh temporary target path under the system temp directory.
 *
 * @return a unique {@code <tmpdir>/ballerina-cache<nanoTime>} path string
 */
public static String getTemporaryTargetPath() {
    String tmpDir = System.getProperty("java.io.tmpdir");
    // nanoTime keeps concurrent builds from colliding on the same directory.
    return Paths.get(tmpDir, "ballerina-cache" + System.nanoTime()).toString();
}
/**
 * Serializes {@code buildJson} into the 'build' file as pretty-printed JSON.
 *
 * @param buildFilePath build file path
 * @param buildJson     build metadata to persist
 * @throws ProjectException when the file is not writable or writing fails
 */
public static void writeBuildFile(Path buildFilePath, BuildJson buildJson) {
    // NOTE(review): File.canWrite() returns false for a file that does not exist yet,
    // so this also rejects a fresh 'build' file — confirm callers pre-create it.
    if (!buildFilePath.toFile().canWrite()) {
        throw new ProjectException("'build' file does not have write permissions");
    }
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    try {
        Files.write(buildFilePath, Collections.singleton(gson.toJson(buildJson)));
    } catch (IOException e) {
        throw new ProjectException("Failed to write to the '" + BUILD_FILE + "' file");
    }
}
/**
 * Returns the later of two package versions, preferring stable releases over
 * pre-releases when exactly one of the two is a pre-release.
 *
 * @param v1 package version 1
 * @param v2 package version 2
 * @return the latest of the two versions
 */
public static PackageVersion getLatest(PackageVersion v1, PackageVersion v2) {
    SemanticVersion semVer1 = v1.value();
    SemanticVersion semVer2 = v2.value();
    boolean v1Pre = semVer1.isPreReleaseVersion();
    boolean v2Pre = semVer2.isPreReleaseVersion();
    if (v1Pre != v2Pre) {
        // Exactly one is a pre-release: the stable one wins.
        return v1Pre ? v2 : v1;
    }
    return semVer1.greaterThanOrEqualTo(semVer2) ? v1 : v2;
}
/**
 * Checks whether the project contains no Ballerina source files and no test files
 * in any of its modules.
 *
 * @param project project to inspect
 * @return {@code true} when every module is empty
 */
public static boolean isProjectEmpty(Project project) {
    for (ModuleId moduleId : project.currentPackage().moduleIds()) {
        Module module = project.currentPackage().module(moduleId);
        boolean hasSources = !module.documentIds().isEmpty();
        boolean hasTests = !module.testDocumentIds().isEmpty();
        if (hasSources || hasTests) {
            return false;
        }
    }
    return true;
}
/**
 * Resolves the include-field patterns to the package paths they match. Patterns are
 * applied in order; a pattern starting with '!' removes previously matched paths.
 *
 * @param patterns    string patterns to match
 * @param packageRoot package root the patterns are evaluated against
 * @return the matching paths, relative to the package root
 */
public static List<Path> getPathsMatchingIncludePatterns(List<String> patterns, Path packageRoot) {
    List<Path> matches = new ArrayList<>();
    for (String pattern : patterns) {
        if (pattern.startsWith("!")) {
            // Negation: prune paths matched by earlier patterns.
            removeNegatedIncludePaths(pattern.substring(1), matches);
        } else {
            addMatchingIncludePaths(pattern, matches, packageRoot);
        }
    }
    return matches;
}
/**
 * Removes from {@code allMatchingPaths} every path that matches the (already
 * de-negated) pattern.
 *
 * @param pattern          pattern without its leading '!'
 * @param allMatchingPaths accumulated matches; modified in place
 */
private static void removeNegatedIncludePaths(String pattern, List<Path> allMatchingPaths) {
    String combinedPattern = getGlobFormatPattern(pattern);
    allMatchingPaths.removeAll(filterPathStream(allMatchingPaths.stream(), combinedPattern));
}
/**
 * Walks the package tree and appends every path matching {@code pattern} (relative to
 * the package root) to {@code allMatchingPaths}.
 *
 * @param pattern          include pattern
 * @param allMatchingPaths accumulated matches; appended in place
 * @param packageRoot      package root to walk
 * @throws ProjectException when the file tree cannot be read
 */
private static void addMatchingIncludePaths(String pattern, List<Path> allMatchingPaths, Path packageRoot) {
    String combinedPattern = getGlobFormatPattern(pattern);
    try (Stream<Path> pathStream = Files.walk(packageRoot)) {
        for (Path absolutePath : filterPathStream(pathStream, combinedPattern)) {
            if (!isCorrectPatternPathMatch(absolutePath, packageRoot, pattern)) {
                continue;
            }
            allMatchingPaths.add(packageRoot.relativize(absolutePath));
        }
    } catch (IOException e) {
        throw new ProjectException("Failed to read files matching the include pattern '" + pattern + "': " +
                e.getMessage(), e);
    }
}
/**
 * Applies the pattern-position rules that globs alone cannot express: nothing under
 * target/, root-anchored patterns ("/x") match only direct children, and directory
 * patterns ("x/") never match plain files.
 *
 * @param absolutePath candidate path (absolute)
 * @param packageRoot  package root
 * @param pattern      original (unexpanded) pattern
 * @return {@code true} when the candidate is a valid match
 */
private static boolean isCorrectPatternPathMatch(Path absolutePath, Path packageRoot, String pattern) {
    Path relativePath = packageRoot.relativize(absolutePath);
    if (relativePath.startsWith(TARGET_DIR_NAME)) {
        // Generated artifacts are never included.
        return false;
    }
    if (pattern.startsWith("/") && !packageRoot.equals(absolutePath.getParent())) {
        // A root-anchored pattern only matches direct children of the package root.
        return false;
    }
    // A trailing-slash pattern means "directory", so a plain file is not a match.
    return !(pattern.endsWith("/") && absolutePath.toFile().isFile());
}
/**
 * Filters the path stream with a glob pattern.
 *
 * @param pathStream      paths to filter
 * @param combinedPattern glob pattern (without the "glob:" prefix)
 * @return the paths that match the glob
 */
private static List<Path> filterPathStream(Stream<Path> pathStream, String combinedPattern) {
    var matcher = FileSystems.getDefault().getPathMatcher("glob:" + combinedPattern);
    return pathStream.filter(matcher::matches).collect(Collectors.toList());
}
/**
 * Converts an include pattern into glob form: prefixes it for anchored/unanchored
 * matching and strips trailing slashes.
 *
 * @param pattern raw include pattern
 * @return the glob-format pattern
 */
private static String getGlobFormatPattern(String pattern) {
    return getPatternPrefix(pattern) + removeTrailingSlashes(pattern);
}
/**
 * Chooses the glob prefix: a root-anchored pattern ("/x") keeps its slash and gets
 * "**", any other pattern gets "**\/" so it can match at any depth.
 *
 * @param pattern raw include pattern
 * @return the glob prefix
 */
private static String getPatternPrefix(String pattern) {
    return pattern.startsWith("/") ? "**" : "**/";
}
/**
 * Strips every trailing '/' from the pattern.
 *
 * @param pattern raw include pattern
 * @return the pattern without trailing slashes
 */
private static String removeTrailingSlashes(String pattern) {
    int end = pattern.length();
    while (end > 0 && pattern.charAt(end - 1) == '/') {
        end--;
    }
    return pattern.substring(0, end);
}
/**
 * Returns the path of a bala, preferring the "any" platform directory and falling
 * back to "java11" when the platform-neutral path does not exist.
 *
 * @param balaDirPath path to the bala directory
 * @param org         org name of the bala
 * @param name        package name of the bala
 * @param version     version of the bala
 * @return path of the bala file (the java11 fallback is returned unchecked)
 */
public static Path getPackagePath(Path balaDirPath, String org, String name, String version) {
    Path anyPlatformPath = balaDirPath.resolve(
            ProjectUtils.getRelativeBalaPath(org, name, version, null));
    if (Files.exists(anyPlatformPath)) {
        return anyPlatformPath;
    }
    return balaDirPath.resolve(
            ProjectUtils.getRelativeBalaPath(org, name, version, JvmTarget.JAVA_11.code()));
}
/**
 * Writes a module's sources, tests and resources to disk under {@code modulesRoot}.
 *
 * @param moduleConfig module content to persist
 * @param modulesRoot  root directory that holds all modules
 * @throws IOException if a directory or file cannot be written
 */
public static void writeModule(ModuleConfig moduleConfig, Path modulesRoot) throws IOException {
    Path moduleDirPath = modulesRoot.resolve(moduleConfig.moduleDescriptor().name().moduleNamePart());
    Files.createDirectories(moduleDirPath);
    for (DocumentConfig sourceDoc : moduleConfig.sourceDocs()) {
        Files.writeString(moduleDirPath.resolve(sourceDoc.name()), sourceDoc.content());
    }
    Path moduleTestDirPath = moduleDirPath.resolve(ProjectConstants.TEST_DIR_NAME);
    Files.createDirectories(moduleTestDirPath);
    for (DocumentConfig testSourceDoc : moduleConfig.testSourceDocs()) {
        Files.writeString(moduleTestDirPath.resolve(testSourceDoc.name()), testSourceDoc.content());
    }
    Path moduleResourcesDirPath = moduleDirPath.resolve(ProjectConstants.RESOURCE_DIR_NAME);
    // FIX: the original re-created moduleTestDirPath here, so the resources directory
    // was never created and writing the first resource failed.
    Files.createDirectories(moduleResourcesDirPath);
    for (ResourceConfig resource : moduleConfig.resources()) {
        // NOTE(review): content().orElse(null) would NPE inside Files.write — presumably
        // resources always carry content here; confirm upstream.
        Files.write(moduleResourcesDirPath.resolve(resource.name()), resource.content().orElse(null));
    }
}
/**
 * Loads a generated module from disk and wraps it into a {@link ModuleConfig} for the
 * given project's current package.
 *
 * @param moduleName name of the generated module
 * @param project    owning project
 * @return the assembled module configuration
 */
public static ModuleConfig createModuleConfig(String moduleName, Project project) {
    Path generatedModuleRoot = project.sourceRoot()
            .resolve(ProjectConstants.GENERATED_MODULES_ROOT)
            .resolve(moduleName);
    ModuleData moduleData = ProjectFiles.loadModule(generatedModuleRoot);
    ModuleId moduleId = ModuleId.create(moduleName, project.currentPackage().packageId());
    List<DocumentConfig> documentConfigs = new ArrayList<>();
    for (DocumentData sourceDoc : moduleData.sourceDocs()) {
        DocumentId documentId = DocumentId.create(sourceDoc.name(), moduleId);
        documentConfigs.add(DocumentConfig.from(documentId, sourceDoc.content(), sourceDoc.name()));
    }
    List<DocumentConfig> testDocumentConfigs = new ArrayList<>();
    for (DocumentData testDoc : moduleData.testSourceDocs()) {
        DocumentId documentId = DocumentId.create(testDoc.name(), moduleId);
        testDocumentConfigs.add(DocumentConfig.from(documentId, testDoc.content(), testDoc.name()));
    }
    ModuleDescriptor moduleDescriptor = ModuleDescriptor.from(
            ModuleName.from(project.currentPackage().packageName(), moduleName),
            project.currentPackage().descriptor());
    return ModuleConfig.from(
            moduleId, moduleDescriptor, documentConfigs, testDocumentConfigs, null, new ArrayList<>());
}
} | class ProjectUtils {
    // System property key for the current user's home directory.
    private static final String USER_HOME = "user.home";
    // Dot-separated identifiers: alphanumerics, underscores and dots (e.g. "my.pkg_name").
    private static final Pattern separatedIdentifierPattern = Pattern.compile("^[a-zA-Z0-9_.]*$");
    // Names consisting only of dots — presumably rejected by validateDotSeparatedIdentifiers; verify there.
    private static final Pattern onlyDotsPattern = Pattern.compile("^[.]+$");
    // Organization names: alphanumerics and underscores only (no dots).
    private static final Pattern orgNamePattern = Pattern.compile("^[a-zA-Z0-9_]*$");
/**
* Validates the org-name.
*
* @param orgName The org-name
* @return True if valid org-name or package name, else false.
*/
public static boolean validateOrgName(String orgName) {
Matcher m = orgNamePattern.matcher(orgName);
return m.matches();
}
/**
* Validates the package name.
*
* @param packageName The package name.
* @return True if valid package name, else false.
*/
public static boolean validatePackageName(String packageName) {
return validateDotSeparatedIdentifiers(packageName)
&& validateUnderscoresOfName(packageName)
&& validateInitialNumericsOfName(packageName);
}
/**
* Validates the package name.
*
* @param orgName The organization name.
* @param packageName The package name.
* @return True if valid package name, else false.
*/
public static boolean validatePackageName(String orgName, String packageName) {
if (isLangLibPackage(PackageOrg.from(orgName), PackageName.from(packageName))) {
return validateDotSeparatedIdentifiers(packageName)
&& validateInitialNumericsOfName(packageName);
}
return validateDotSeparatedIdentifiers(packageName)
&& validateUnderscoresOfName(packageName)
&& validateInitialNumericsOfName(packageName);
}
    /**
     * Validates the module name: the only requirement is dot-separated identifiers
     * (underscore placement and leading digits are not checked for modules).
     *
     * @param moduleName the module name
     * @return {@code true} when the module name is valid
     */
    public static boolean validateModuleName(String moduleName) {
        return validateDotSeparatedIdentifiers(moduleName);
    }
/**
* Validates the organization, package or module name length.
* Maximum length is 256 characters.
*
* @param name name.
* @return true if valid name length, else false.
*/
public static boolean validateNameLength(String name) {
return name.length() <= 256;
}
/**
* Checks the organization, package or module name has initial, trailing or consecutive underscores.
*
* @param name name.
* @return true if name does not have initial, trailing or consecutive underscores, else false.
*/
public static boolean validateUnderscoresOfName(String name) {
return !(name.startsWith("_") || name.endsWith("_") || name.contains("__"));
}
/**
* Checks the organization, package or module name has initial numeric characters.
*
* @param name name.
* @return true if name does not have initial numeric characters, else false.
*/
public static boolean validateInitialNumericsOfName(String name) {
return !name.matches("[0-9].*");
}
/**
 * Remove last character of the given string.
 *
 * @param aString given string
 * @return string removed last character; the empty string when the input is empty
 */
public static String removeLastChar(String aString) {
    // Guard: substring(0, -1) on an empty input would throw StringIndexOutOfBoundsException.
    if (aString.isEmpty()) {
        return aString;
    }
    return aString.substring(0, aString.length() - 1);
}
/**
 * Remove first character of the given string.
 *
 * @param aString given string
 * @return string removed first character; the empty string when the input is empty
 */
public static String removeFirstChar(String aString) {
    // Guard: substring(1) on an empty input would throw StringIndexOutOfBoundsException.
    if (aString.isEmpty()) {
        return aString;
    }
    return aString.substring(1);
}
/**
 * Returns a user-facing message describing why the given package name is invalid.
 *
 * @param packageName package name to diagnose
 * @return the first applicable validation error message
 */
public static String getPackageValidationError(String packageName) {
    if (!validateDotSeparatedIdentifiers(packageName)) {
        return "Package name can only contain alphanumerics and underscores.";
    }
    if (!validateInitialNumericsOfName(packageName)) {
        return "Package name cannot have initial numeric characters.";
    }
    // Only the underscore rules remain to be violated at this point.
    return getValidateUnderscoreError(packageName, "Package");
}
/**
 * Get specific error message when organization, package or module name has initial, trailing or
 * consecutive underscores.
 *
 * @param name name.
 * @param packageOrModule package or module.
 * @return specific error message.
 */
public static String getValidateUnderscoreError(String name, String packageOrModule) {
    String problem;
    if (name.startsWith("_")) {
        problem = " name cannot have initial underscore characters.";
    } else if (name.endsWith("_")) {
        problem = " name cannot have trailing underscore characters.";
    } else {
        problem = " name cannot have consecutive underscore characters.";
    }
    return packageOrModule + problem;
}
/**
 * Find the project root by walking up from the given path.
 *
 * @param filePath project path
 * @return project root, or {@code null} when no Ballerina.toml exists up the tree
 */
public static Path findProjectRoot(Path filePath) {
    Path current = filePath;
    while (current != null) {
        current = current.toAbsolutePath().normalize();
        // The project root is the first ancestor directory containing Ballerina.toml.
        if (current.toFile().isDirectory() && Files.exists(current.resolve(BALLERINA_TOML))) {
            return current;
        }
        current = current.getParent();
    }
    return null;
}
/**
 * Checks if the path is a Ballerina project.
 *
 * @param sourceRoot source root of the project.
 * @return true if the directory is a project repo, false if its the home repo
 */
public static boolean isBallerinaProject(Path sourceRoot) {
    if (!Files.isDirectory(sourceRoot)) {
        return false;
    }
    Path ballerinaToml = sourceRoot.resolve(BALLERINA_TOML);
    return Files.exists(ballerinaToml) && Files.isRegularFile(ballerinaToml);
}
/**
 * Guess organization name based on user name in system.
 *
 * @return organization name
 */
public static String guessOrgName() {
    String orgName = System.getProperty(USER_NAME);
    if (orgName == null) {
        orgName = "my_org";
    } else if (!validateOrgName(orgName)) {
        // Replace every character that is not valid in an org name with an underscore.
        orgName = orgName.replaceAll("[^a-zA-Z0-9_]", "_");
    }
    return orgName.toLowerCase(Locale.getDefault());
}
/**
 * Guess package name with valid pattern.
 * <p>
 * Sanitizes an arbitrary name (typically a directory name) into a valid package name:
 * invalid characters become underscores, a leading digit gets a template-specific prefix,
 * and underscore placement is normalized. The steps below are order-sensitive.
 *
 * @param packageName package name
 * @param template template name; "lib" selects the "lib" prefix, anything else gets "app"
 * @return package name
 */
public static String guessPkgName(String packageName, String template) {
    if (!validatePackageName(packageName)) {
        // Replace every character that is not alphanumeric, '_' or '.' with '_'.
        packageName = packageName.replaceAll("[^a-zA-Z0-9_.]", "_");
    }
    // A package name must not start with a digit; prefix based on the chosen template.
    if (packageName.matches("[0-9].*")) {
        if (template.equalsIgnoreCase("lib")) {
            packageName = "lib" + packageName;
        } else {
            packageName = "app" + packageName;
        }
    }
    // Normalize underscores: drop a leading one, collapse doubles, drop a trailing one.
    if (packageName.startsWith("_")) {
        packageName = removeFirstChar(packageName);
    }
    if (packageName.contains("__")) {
        packageName = packageName.replaceAll("__", "_");
    }
    if (packageName.endsWith("_")) {
        packageName = removeLastChar(packageName);
    }
    return packageName;
}
/**
 * Guess module name with valid pattern.
 *
 * @param moduleName module name
 * @return module name
 */
public static String guessModuleName(String moduleName) {
    if (validateModuleName(moduleName)) {
        return moduleName;
    }
    // Replace invalid characters with underscores to obtain a valid module name.
    return moduleName.replaceAll("[^a-zA-Z0-9_.]", "_");
}
public static PackageOrg defaultOrg() {
    // Derive a default organization from the current OS user name.
    String orgName = guessOrgName();
    return PackageOrg.from(orgName);
}
public static PackageName defaultName(Path projectPath) {
    // Fall back to an empty string when the path has no file name (e.g. a filesystem root).
    String dirName = Optional.ofNullable(projectPath.getFileName())
            .map(Path::toString)
            .orElse("");
    return PackageName.from(guessPkgName(dirName, "app"));
}
public static PackageVersion defaultVersion() {
    // Newly-created packages start from the internal default version constant.
    String internalVersion = ProjectConstants.INTERNAL_VERSION;
    return PackageVersion.from(internalVersion);
}
public static String getBalaName(PackageManifest pkgDesc) {
    // Delegate to the 4-arg overload; the platform is unknown here, so pass null ("any").
    return getBalaName(pkgDesc.org().toString(), pkgDesc.name().toString(),
            pkgDesc.version().toString(), null);
}
public static String getBalaName(String org, String pkgName, String version, String platform) {
    // Bala naming convention: <org>-<name>-<platform>-<version>.bala; missing platform means "any".
    String platformSegment = (platform == null || platform.isEmpty()) ? "any" : platform;
    return org + "-" + pkgName + "-" + platformSegment + "-" + version + BLANG_COMPILED_PKG_BINARY_EXT;
}
/**
 * Returns the relative path of extracted bala beginning from the package org.
 *
 * @param org package org
 * @param pkgName package name
 * @param version package version
 * @param platform version, null converts to `any`
 * @return relative bala path
 */
public static Path getRelativeBalaPath(String org, String pkgName, String version, String platform) {
    String platformSegment = (platform == null || platform.isEmpty()) ? "any" : platform;
    return Paths.get(org, pkgName, version, platformSegment);
}
public static String getJarFileName(Package pkg) {
    // Package jars are named <org>-<name>-<version>.jar.
    String org = pkg.packageOrg().toString();
    String name = pkg.packageName().toString();
    return org + "-" + name + "-" + pkg.packageVersion() + BLANG_COMPILED_JAR_EXT;
}
public static String getExecutableName(Package pkg) {
    // Executables are named after the package only (no org/version prefix).
    return pkg.packageName().toString().concat(BLANG_COMPILED_JAR_EXT);
}
public static String getOrgFromBalaName(String balaName) {
    // Bala file names follow the <org>-<name>-<platform>-<version>.bala convention;
    // the org is everything before the first dash.
    int firstDash = balaName.indexOf('-');
    return firstDash < 0 ? balaName : balaName.substring(0, firstDash);
}
public static String getPackageNameFromBalaName(String balaName) {
    // The package name is the second dash-separated segment of the bala file name.
    String[] segments = balaName.split("-");
    return segments[1];
}
// Extracts the version segment from a bala file name (<org>-<name>-<platform>-<version>.bala).
// NOTE(review): a version containing '-' (e.g. a pre-release like 1.0.0-beta) would be split
// across segments and break this parsing — confirm inputs are always plain semver.
public static String getVersionFromBalaName(String balaName) {
    String versionAndExtension = balaName.split("-")[3];
    int extensionIndex = versionAndExtension.indexOf(BLANG_COMPILED_PKG_BINARY_EXT);
    return versionAndExtension.substring(0, extensionIndex);
}
private static final HashSet<String> excludeExtensions = new HashSet<>(Lists.of("DSA", "SF"));
public static Path getBalHomePath() {
    // BALLERINA_HOME is set as a system property by the distribution's launcher scripts.
    String balHome = System.getProperty(BALLERINA_HOME);
    return Paths.get(balHome);
}
public static Path getBallerinaRTJarPath() {
    // The runtime jar lives under <BALLERINA_HOME>/bre/lib and carries the pack version.
    String runtimeJarName = "ballerina-rt-" + RepoUtils.getBallerinaPackVersion() + BLANG_COMPILED_JAR_EXT;
    return getBalHomePath().resolve("bre").resolve("lib").resolve(runtimeJarName);
}
/**
 * Builds the platform jars required to run tests: the test runtime/core jars, the
 * language jar, JaCoCo (coverage), ASM (bytecode instrumentation) and diff-utils.
 * All are attached in TEST_ONLY scope under the ballerina/test package.
 *
 * @return jar libraries required for test execution
 */
public static List<JarLibrary> testDependencies() {
    List<JarLibrary> dependencies = new ArrayList<>();
    String testPkgName = "ballerina/test";
    String ballerinaVersion = RepoUtils.getBallerinaPackVersion();
    // All test-time jars ship inside the distribution's bre/lib directory.
    Path homeLibPath = getBalHomePath().resolve(BALLERINA_HOME_BRE).resolve(LIB_DIR);
    String testRuntimeJarName = TEST_RUNTIME_JAR_PREFIX + ballerinaVersion + BLANG_COMPILED_JAR_EXT;
    String testCoreJarName = TEST_CORE_JAR_PREFIX + ballerinaVersion + BLANG_COMPILED_JAR_EXT;
    String langJarName = "ballerina-lang-" + ballerinaVersion + BLANG_COMPILED_JAR_EXT;
    Path testRuntimeJarPath = homeLibPath.resolve(testRuntimeJarName);
    Path testCoreJarPath = homeLibPath.resolve(testCoreJarName);
    Path langJarPath = homeLibPath.resolve(langJarName);
    Path jacocoCoreJarPath = homeLibPath.resolve(JACOCO_CORE_JAR);
    Path jacocoReportJarPath = homeLibPath.resolve(JACOCO_REPORT_JAR);
    Path asmJarPath = homeLibPath.resolve(ASM_JAR);
    Path asmTreeJarPath = homeLibPath.resolve(ASM_TREE_JAR);
    Path asmCommonsJarPath = homeLibPath.resolve(ASM_COMMONS_JAR);
    Path diffUtilsJarPath = homeLibPath.resolve(DIFF_UTILS_JAR);
    dependencies.add(new JarLibrary(testRuntimeJarPath, PlatformLibraryScope.TEST_ONLY, testPkgName));
    dependencies.add(new JarLibrary(testCoreJarPath, PlatformLibraryScope.TEST_ONLY, testPkgName));
    dependencies.add(new JarLibrary(langJarPath, PlatformLibraryScope.TEST_ONLY, testPkgName));
    dependencies.add(new JarLibrary(jacocoCoreJarPath, PlatformLibraryScope.TEST_ONLY, testPkgName));
    dependencies.add(new JarLibrary(jacocoReportJarPath, PlatformLibraryScope.TEST_ONLY, testPkgName));
    dependencies.add(new JarLibrary(asmJarPath, PlatformLibraryScope.TEST_ONLY, testPkgName));
    dependencies.add(new JarLibrary(asmTreeJarPath, PlatformLibraryScope.TEST_ONLY, testPkgName));
    dependencies.add(new JarLibrary(asmCommonsJarPath, PlatformLibraryScope.TEST_ONLY, testPkgName));
    dependencies.add(new JarLibrary(diffUtilsJarPath, PlatformLibraryScope.TEST_ONLY, testPkgName));
    return dependencies;
}
/**
 * Creates an empty temporary jar (manifest only) used to carry observability symbols.
 *
 * @param packageName package name used as the temp-file prefix
 * @return path of the created temporary jar
 * @throws IOException when the temp file cannot be created or written
 */
public static Path generateObservabilitySymbolsJar(String packageName) throws IOException {
    Path jarPath = Files.createTempFile(packageName + "-", "-observability-symbols.jar");
    Manifest manifest = new Manifest();
    manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
    // try-with-resources closes the stream even when writing the manifest fails;
    // the original closed it manually and leaked the stream on such failures.
    try (JarOutputStream ignored = new JarOutputStream(new BufferedOutputStream(
            new FileOutputStream(jarPath.toFile())), manifest)) {
        // No entries are added; the jar only needs to exist with a manifest.
    }
    return jarPath;
}
/**
 * Assembles a self-contained executable jar: Ballerina runtime entries first, then the
 * generated manifest, then every compiled package jar entry. The first writer of an entry
 * name wins; duplicates and signature files are skipped via copiedEntries bookkeeping.
 *
 * @param manifest manifest to embed in the executable jar
 * @param compiledPackageJarList compiled package jars to merge in
 * @param targetPath destination path of the executable jar
 * @throws IOException when writing the archive fails
 */
public static void assembleExecutableJar(Manifest manifest,
                                         List<CompiledJarFile> compiledPackageJarList,
                                         Path targetPath) throws IOException {
    // Tracks entry names already written so later jars cannot shadow earlier ones.
    HashSet<String> copiedEntries = new HashSet<>();
    try (ZipArchiveOutputStream outStream = new ZipArchiveOutputStream(
            new BufferedOutputStream(new FileOutputStream(targetPath.toString())))) {
        // Runtime first so its entries take precedence over package-jar entries.
        copyRuntimeJar(outStream, getBallerinaRTJarPath(), copiedEntries);
        JarArchiveEntry e = new JarArchiveEntry(JarFile.MANIFEST_NAME);
        outStream.putArchiveEntry(e);
        manifest.write(new BufferedOutputStream(outStream));
        outStream.closeArchiveEntry();
        for (CompiledJarFile compiledJarFile : compiledPackageJarList) {
            for (Map.Entry<String, byte[]> keyVal : compiledJarFile.getJarEntries().entrySet()) {
                copyEntry(copiedEntries, outStream, keyVal);
            }
        }
    }
}
/**
 * Writes one in-memory jar entry to the output archive, unless an entry with the same
 * name was already copied or the entry is an excluded signature file.
 * NOTE(review): entryName is checked against but never added to copiedEntries here, so
 * duplicate names across multiple package jars rely on upstream bookkeeping — confirm.
 *
 * @param copiedEntries names already written to the archive
 * @param outStream target archive stream
 * @param keyVal entry name mapped to its raw bytes
 * @throws IOException when writing to the archive fails
 */
private static void copyEntry(HashSet<String> copiedEntries,
                              ZipArchiveOutputStream outStream,
                              Map.Entry<String, byte[]> keyVal) throws IOException {
    String entryName = keyVal.getKey();
    if (!isCopiedOrExcludedEntry(entryName, copiedEntries)) {
        byte[] entryContent = keyVal.getValue();
        JarArchiveEntry entry = new JarArchiveEntry(entryName);
        outStream.putArchiveEntry(entry);
        outStream.write(entryContent);
        outStream.closeArchiveEntry();
    }
}
/**
 * Copies a given jar file into the executable fat jar.
 * <p>
 * The manifest is skipped, already-copied and signature entries are filtered out, and
 * {@code META-INF/services} files are accumulated as text so that service declarations
 * from multiple jars can be merged and written once at the end.
 *
 * @param outStream target archive stream of the fat jar
 * @param ballerinaRTJarPath Ballerina runtime jar path.
 * @param copiedEntries entry names already written to the archive; updated in place
 * @throws IOException If jar file copying is failed.
 */
public static void copyRuntimeJar(ZipArchiveOutputStream outStream,
                                  Path ballerinaRTJarPath,
                                  HashSet<String> copiedEntries) throws IOException {
    // Accumulated contents of META-INF/services files, keyed by entry name.
    HashMap<String, StringBuilder> services = new HashMap<>();
    // NOTE(review): zipFile is only closed on the success path; a failure inside
    // copyRawEntries would leak it — consider try-with-resources.
    ZipFile zipFile = new ZipFile(ballerinaRTJarPath.toString());
    ZipArchiveEntryPredicate predicate = entry -> {
        String entryName = entry.getName();
        if (entryName.equals("META-INF/MANIFEST.MF")) {
            return false;
        }
        if (entryName.startsWith("META-INF/services")) {
            // Collect the file's text instead of copying it; merged copies are written below.
            StringBuilder s = services.get(entryName);
            if (s == null) {
                s = new StringBuilder();
                services.put(entryName, s);
            }
            char c = '\n';
            int len;
            try (BufferedInputStream inStream = new BufferedInputStream(zipFile.getInputStream(entry))) {
                while ((len = inStream.read()) != -1) {
                    c = (char) len;
                    s.append(c);
                }
            } catch (IOException e) {
                throw new ProjectException(e);
            }
            // Ensure each appended services file ends with a newline so providers from
            // different jars do not run together on one line.
            if (c != '\n') {
                s.append('\n');
            }
            return false;
        }
        if (isCopiedOrExcludedEntry(entryName, copiedEntries)) {
            return false;
        }
        copiedEntries.add(entryName);
        return true;
    };
    // Raw copy avoids recompressing entries that are carried over unchanged.
    zipFile.copyRawEntries(outStream, predicate);
    zipFile.close();
    // Write the merged META-INF/services files collected above.
    for (Map.Entry<String, StringBuilder> entry : services.entrySet()) {
        String s = entry.getKey();
        StringBuilder service = entry.getValue();
        JarArchiveEntry e = new JarArchiveEntry(s);
        outStream.putArchiveEntry(e);
        outStream.write(service.toString().getBytes(StandardCharsets.UTF_8));
        outStream.closeArchiveEntry();
    }
}
// True when the entry was already written or its extension is on the signature-file
// exclusion list (.DSA/.SF).
private static boolean isCopiedOrExcludedEntry(String entryName, HashSet<String> copiedEntries) {
    if (copiedEntries.contains(entryName)) {
        return true;
    }
    String extension = entryName.substring(entryName.lastIndexOf(".") + 1);
    return excludeExtensions.contains(extension);
}
/**
 * Construct and return the thin jar name of the provided module.
 *
 * @param module Module instance
 * @return the name of the thin jar
 */
public static String getJarFileName(Module module) {
    if (module.packageInstance().manifest().org().anonymous()) {
        // Single-file (anonymous-org) packages are named after their source document.
        DocumentId documentId = module.documentIds().iterator().next();
        String documentName = module.document(documentId).name();
        return getFileNameWithoutExtension(documentName);
    }
    ModuleName moduleName = module.moduleName();
    return moduleName.isDefaultModuleName()
            ? moduleName.packageName().toString()
            : moduleName.moduleNamePart();
}
/**
 * Construct and return the thin jar moduleName.
 *
 * @param org organization
 * @param moduleName module name
 * @param version version
 * @return the moduleName of the thin jar
 */
public static String getThinJarFileName(PackageOrg org, String moduleName, PackageVersion version) {
    return String.join("-", org.value(), moduleName, version.value().toString());
}
/**
 * Create and get the home repository path.
 * <p>
 * The location is taken from the HOME_REPO environment variable when set; otherwise it
 * defaults to a directory under the user's home directory.
 *
 * @return home repository path
 */
public static Path createAndGetHomeReposPath() {
    Path homeRepoPath;
    String homeRepoDir = System.getenv(ProjectConstants.HOME_REPO_ENV_KEY);
    if (homeRepoDir == null || homeRepoDir.isEmpty()) {
        String userHomeDir = System.getProperty(USER_HOME);
        if (userHomeDir == null || userHomeDir.isEmpty()) {
            throw new BLangCompilerException("Error creating home repository: unable to get user home directory");
        }
        homeRepoPath = Paths.get(userHomeDir, ProjectConstants.HOME_REPO_DEFAULT_DIRNAME);
    } else {
        // User has overridden the home repo path via the environment.
        homeRepoPath = Paths.get(homeRepoDir);
    }
    homeRepoPath = homeRepoPath.toAbsolutePath();
    // A non-directory (e.g. regular file or dangling symlink) at this path is fatal.
    if (Files.exists(homeRepoPath) && !Files.isDirectory(homeRepoPath, LinkOption.NOFOLLOW_LINKS)) {
        throw new BLangCompilerException("Home repository is not a directory: " + homeRepoPath.toString());
    }
    return homeRepoPath;
}
/**
 * Check if a ballerina module exist.
 * @param projectPath project path
 * @param moduleName module name
 * @return module exist
 */
public static boolean isModuleExist(Path projectPath, String moduleName) {
    Path modulePath = projectPath
            .resolve(ProjectConstants.MODULES_ROOT)
            .resolve(moduleName);
    return Files.exists(modulePath);
}
/**
 * Initialize proxy if proxy is available in settings.toml.
 *
 * @param proxy toml model proxy
 * @return proxy, or {@code null} when no usable host/port is configured
 */
public static Proxy initializeProxy(io.ballerina.projects.internal.model.Proxy proxy) {
    if (proxy != null && !"".equals(proxy.host()) && proxy.port() > 0) {
        InetSocketAddress proxyInet = new InetSocketAddress(proxy.host(), proxy.port());
        // NOTE(review): the authenticator is installed only when a username is present but
        // the password is EMPTY — presumably so credentials can be supplied on demand by
        // RemoteAuthenticator; confirm this is intentional and not a negation bug.
        if (!"".equals(proxy.username()) && "".equals(proxy.password())) {
            Authenticator authenticator = new URIDryConverter.RemoteAuthenticator();
            // Registers a process-wide default authenticator.
            Authenticator.setDefault(authenticator);
        }
        return new Proxy(Proxy.Type.HTTP, proxyInet);
    }
    return null;
}
/**
 * Read the access token generated for the CLI.
 *
 * @return access token for generated for the CLI
 */
public static String getAccessTokenOfCLI(Settings settings) {
    // The environment variable always takes precedence over Settings.toml.
    String tokenAsEnvVar = System.getenv(ProjectConstants.BALLERINA_CENTRAL_ACCESS_TOKEN);
    if (tokenAsEnvVar != null) {
        return tokenAsEnvVar;
    }
    return settings.getCentral() == null ? "" : settings.getCentral().getAccessToken();
}
// Fails with a ProjectException when the given path is not writable by this process.
public static void checkWritePermission(Path path) {
    File target = path.toFile();
    if (!target.canWrite()) {
        throw new ProjectException("'" + path.normalize() + "' does not have write permissions");
    }
}
// Fails with a ProjectException when the given path is not readable by this process.
public static void checkReadPermission(Path path) {
    File target = path.toFile();
    if (!target.canRead()) {
        throw new ProjectException("'" + path.normalize() + "' does not have read permissions");
    }
}
// Fails with a ProjectException when the given path is not executable by this process.
public static void checkExecutePermission(Path path) {
    File target = path.toFile();
    if (!target.canExecute()) {
        throw new ProjectException("'" + path.normalize() + "' does not have execute permissions");
    }
}
/**
* Get `Dependencies.toml` content as a string.
*
* @param pkgGraphDependencies direct dependencies of the package dependency graph
* @return Dependencies.toml` content
*/
public static String getDependenciesTomlContent(Collection<ResolvedPackageDependency> pkgGraphDependencies) {
String comment = "
"
"
StringBuilder content = new StringBuilder(comment);
content.append("[ballerina]\n");
content.append("version = \"").append(RepoUtils.getBallerinaShortVersion()).append("\"\n");
content.append("dependencies-toml-version = \"").append(ProjectConstants.DEPENDENCIES_TOML_VERSION)
.append("\"\n\n");
pkgGraphDependencies.forEach(graphDependency -> {
PackageDescriptor descriptor = graphDependency.packageInstance().descriptor();
addDependencyContent(content, descriptor.org().value(), descriptor.name().value(),
descriptor.version().value().toString(), null, Collections.emptyList(),
Collections.emptyList());
content.append("\n");
});
return String.valueOf(content);
}
/**
* Get `Dependencies.toml` content as a string.
*
* @param pkgDependencies direct dependencies of the package dependency graph
* @return Dependencies.toml` content
*/
public static String getDependenciesTomlContent(List<Dependency> pkgDependencies) {
String comment = "
+ "
+ "
StringBuilder content = new StringBuilder(comment);
content.append("[ballerina]\n");
content.append("dependencies-toml-version = \"").append(ProjectConstants.DEPENDENCIES_TOML_VERSION)
.append("\"\n\n");
pkgDependencies.forEach(dependency -> {
addDependencyContent(content, dependency.getOrg(), dependency.getName(), dependency.getVersion(),
getDependencyScope(dependency.getScope()), dependency.getDependencies(),
dependency.getModules());
content.append("\n");
});
return String.valueOf(content);
}
/**
 * Appends one [[package]] TOML table for a dependency, including its optional scope,
 * inline transitive-dependency array and inline module array.
 *
 * @param content buffer receiving the TOML text
 * @param org dependency organization
 * @param name dependency package name
 * @param version dependency version
 * @param scope optional scope string (e.g. "testOnly"); omitted when null
 * @param dependencies transitive dependencies to list inline (may be empty)
 * @param modules modules of the dependency to list inline (may be empty)
 */
private static void addDependencyContent(StringBuilder content, String org, String name, String version,
                                         String scope, List<Dependency> dependencies,
                                         List<Dependency.Module> modules) {
    content.append("[[package]]\n");
    content.append("org = \"").append(org).append("\"\n");
    content.append("name = \"").append(name).append("\"\n");
    content.append("version = \"").append(version).append("\"\n");
    if (scope != null) {
        content.append("scope = \"").append(scope).append("\"\n");
    }
    if (!dependencies.isEmpty()) {
        // Emit the transitive dependencies as an inline array of {org, name} tables,
        // comma-separated except after the last element.
        var count = 1;
        content.append("dependencies = [\n");
        for (Dependency transDependency : dependencies) {
            content.append("\t{");
            content.append("org = \"").append(transDependency.getOrg()).append("\", ");
            content.append("name = \"").append(transDependency.getName()).append("\"");
            content.append("}");
            if (count != dependencies.size()) {
                content.append(",\n");
            } else {
                content.append("\n");
            }
            count++;
        }
        content.append("]\n");
    }
    if (!modules.isEmpty()) {
        // Same inline-array layout for the dependency's modules.
        var count = 1;
        content.append("modules = [\n");
        for (Dependency.Module module : modules) {
            content.append("\t{");
            content.append("org = \"").append(module.org()).append("\", ");
            content.append("packageName = \"").append(module.packageName()).append("\", ");
            content.append("moduleName = \"").append(module.moduleName()).append("\"");
            content.append("}");
            if (count != modules.size()) {
                content.append(",\n");
            } else {
                content.append("\n");
            }
            count++;
        }
        content.append("]\n");
    }
}
// Only the test-only scope is surfaced in Dependencies.toml; the default scope is
// implicit and rendered as null (no "scope" key).
private static String getDependencyScope(PackageDependencyScope scope) {
    return scope == PackageDependencyScope.TEST_ONLY ? "testOnly" : null;
}
// For module "a.b.c" the owning package may be "a", "a.b" or "a.b.c"; built-in modules
// always map one-to-one onto a package of the same name.
public static List<PackageName> getPossiblePackageNames(PackageOrg packageOrg, String moduleName) {
    if (isBuiltInPackage(packageOrg, moduleName)) {
        return Collections.singletonList(PackageName.from(moduleName));
    }
    String[] modNameParts = moduleName.split("\\.");
    List<PackageName> possiblePkgNames = new ArrayList<>(modNameParts.length);
    StringJoiner pkgNameBuilder = new StringJoiner(".");
    for (String part : modNameParts) {
        pkgNameBuilder.add(part);
        possiblePkgNames.add(PackageName.from(pkgNameBuilder.toString()));
    }
    return possiblePkgNames;
}
// Built-in packages: anything in the internal org, and the ballerina-org lang.*,
// jballerina.java and test modules.
public static boolean isBuiltInPackage(PackageOrg org, String moduleName) {
    if (org.value().equals(Names.BALLERINA_INTERNAL_ORG.getValue())) {
        return true;
    }
    if (!org.isBallerinaOrg()) {
        return false;
    }
    return moduleName.startsWith("lang.")
            || moduleName.equals(Names.JAVA.getValue())
            || moduleName.equals(Names.TEST.getValue());
}
// Lang-lib packages are the ballerina-org lang.* packages plus jballerina.java.
public static boolean isLangLibPackage(PackageOrg org, PackageName packageName) {
    if (!org.isBallerinaOrg()) {
        return false;
    }
    String name = packageName.value();
    return name.startsWith("lang.") || name.equals(Names.JAVA.getValue());
}
/**
 * Extracts a .bala file into the provided destination directory.
 * <p>
 * Stale content at the destination is removed first, and each entry name is validated so
 * a malicious archive cannot write outside the destination directory ("zip slip").
 *
 * @param balaFilePath .bala file path
 * @param balaFileDestPath directory into which the .bala should be extracted
 * @throws IOException if extraction fails or an entry escapes the destination directory
 */
public static void extractBala(Path balaFilePath, Path balaFileDestPath) throws IOException {
    // Bug fix: the original tested Files.isDirectory(balaFilePath) — the archive itself,
    // which is never a directory — so stale destination content was never cleaned, and
    // after a clean the destination directory was never re-created.
    if (Files.exists(balaFileDestPath) && Files.isDirectory(balaFileDestPath)) {
        deleteRecursively(balaFileDestPath);
    }
    Files.createDirectories(balaFileDestPath);
    Path normalizedDest = balaFileDestPath.toAbsolutePath().normalize();
    byte[] buffer = new byte[1024 * 4];
    try (FileInputStream fileInputStream = new FileInputStream(balaFilePath.toString());
         ZipInputStream zipInputStream = new ZipInputStream(fileInputStream)) {
        for (ZipEntry zipEntry = zipInputStream.getNextEntry(); zipEntry != null;
                zipEntry = zipInputStream.getNextEntry()) {
            Path outputPath = normalizedDest.resolve(zipEntry.getName()).normalize();
            // Zip-slip guard: refuse entries that resolve outside the destination.
            if (!outputPath.startsWith(normalizedDest)) {
                throw new IOException("Invalid entry in bala archive: " + zipEntry.getName());
            }
            if (zipEntry.isDirectory()) {
                Files.createDirectories(outputPath);
                continue;
            }
            Files.createDirectories(outputPath.getParent());
            try (FileOutputStream fileOutputStream = new FileOutputStream(outputPath.toFile())) {
                int len;
                while ((len = zipInputStream.read(buffer)) > 0) {
                    fileOutputStream.write(buffer, 0, len);
                }
            }
        }
    }
}

// Best-effort recursive delete of a file tree rooted at the given path.
private static void deleteRecursively(Path path) {
    File file = path.toFile();
    File[] children = file.listFiles();
    if (children != null) {
        for (File child : children) {
            deleteRecursively(child.toPath());
        }
    }
    file.delete();
}
/**
 * Delete the given directory along with all files and sub directories.
 *
 * @param directoryPath Directory to delete.
 * @return {@code true} when the whole tree was removed, {@code false} on the first failure
 */
public static boolean deleteDirectory(Path directoryPath) {
    File root = directoryPath.toFile();
    // Depth-first: children must be gone before the directory itself can be deleted.
    File[] children = root.isDirectory() ? root.listFiles() : null;
    if (children != null) {
        for (File child : children) {
            if (!deleteDirectory(child.toPath())) {
                return false;
            }
        }
    }
    return root.delete();
}
/**
 * Read build file from given path.
 *
 * @param buildJsonPath build file path
 * @return build json object
 * @throws JsonSyntaxException incorrect json syntax
 * @throws IOException if json read fails
 */
public static BuildJson readBuildJson(Path buildJsonPath) throws JsonSyntaxException, IOException {
    try (BufferedReader reader = Files.newBufferedReader(buildJsonPath)) {
        Gson gson = new Gson();
        return gson.fromJson(reader, BuildJson.class);
    }
}
/**
 * Check project files are updated.
 * <p>
 * A project counts as updated (i.e. a rebuild is needed) when the observability jar is
 * missing while observability is enabled, when the 'build' file is absent or unreadable,
 * or when any source file is newer than the timestamp recorded in the 'build' file.
 *
 * @param project project instance
 * @return is project files are updated
 */
public static boolean isProjectUpdated(Project project) {
    // Expected location of the cached observability-symbols jar for this package.
    Path observeJarCachePath = project.targetDir()
            .resolve(CACHES_DIR_NAME)
            .resolve(project.currentPackage().packageOrg().value())
            .resolve(project.currentPackage().packageName().value())
            .resolve(project.currentPackage().packageVersion().value().toString())
            .resolve("observe")
            .resolve(project.currentPackage().packageOrg().value() + "-"
                             + project.currentPackage().packageName().value()
                             + "-observability-symbols.jar");
    if (project.buildOptions().observabilityIncluded() &&
            !observeJarCachePath.toFile().exists()) {
        return true;
    }
    Path buildFile = project.sourceRoot().resolve(TARGET_DIR_NAME).resolve(BUILD_FILE);
    if (buildFile.toFile().exists()) {
        try {
            BuildJson buildJson = readBuildJson(buildFile);
            long lastProjectUpdatedTime = FileUtils.lastModifiedTimeOfBalProject(project.sourceRoot());
            PackageName packageName = project.currentPackage().packageName();
            // Missing or incomplete timestamps mean we cannot prove freshness → rebuild.
            if (buildJson == null
                    || buildJson.getLastModifiedTime() == null
                    || buildJson.getLastModifiedTime().entrySet().isEmpty()
                    || buildJson.getLastModifiedTime().get(packageName.value()) == null) {
                return true;
            }
            long defaultModuleLastModifiedTime = buildJson.getLastModifiedTime()
                    .get(packageName.value());
            return lastProjectUpdatedTime > defaultModuleLastModifiedTime;
        } catch (IOException e) {
            // Unreadable/corrupt 'build' file: drop it so the next build regenerates it.
            try {
                Files.deleteIfExists(buildFile);
            } catch (IOException ex) {
                // Best effort — a leftover stale file only forces another rebuild.
            }
            return true;
        }
    }
    return true;
}
/**
 * Get temporary target path.
 *
 * @return temporary target path
 */
public static String getTemporaryTargetPath() {
    // nanoTime keeps concurrent builds from colliding on the same cache directory.
    Path tmpDir = Paths.get(System.getProperty("java.io.tmpdir"));
    return tmpDir.resolve("ballerina-cache" + System.nanoTime()).toString();
}
/**
 * Write build file from given object.
 *
 * @param buildFilePath build file path
 * @param buildJson BuildJson object
 */
public static void writeBuildFile(Path buildFilePath, BuildJson buildJson) {
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    // Bug fix: File.canWrite() returns false for a file that does not exist yet, so the
    // original rejected the very first build. Files.write creates the file when absent;
    // only an existing, read-only file must be rejected.
    File buildFile = buildFilePath.toFile();
    if (buildFile.exists() && !buildFile.canWrite()) {
        throw new ProjectException("'build' file does not have write permissions");
    }
    try {
        Files.write(buildFilePath, Collections.singleton(gson.toJson(buildJson)));
    } catch (IOException e) {
        // Preserve the underlying cause (the 2-arg ProjectException ctor is used elsewhere
        // in this class).
        throw new ProjectException("Failed to write to the '" + BUILD_FILE + "' file", e);
    }
}
/**
 * Compare and get latest of two package versions.
 *
 * @param v1 package version 1
 * @param v2 package version 2
 * @return latest package version from given two package versions
 */
public static PackageVersion getLatest(PackageVersion v1, PackageVersion v2) {
    SemanticVersion semVer1 = v1.value();
    SemanticVersion semVer2 = v2.value();
    // A stable release always beats a pre-release; otherwise take the numerically larger.
    if (semVer1.isPreReleaseVersion() != semVer2.isPreReleaseVersion()) {
        return semVer1.isPreReleaseVersion() ? v2 : v1;
    }
    return semVer1.greaterThanOrEqualTo(semVer2) ? v1 : v2;
}
/**
 * Checks if a given project does not contain ballerina source files or test files.
 *
 * @param project project for checking for emptiness
 * @return true if the project is empty
 */
public static boolean isProjectEmpty(Project project) {
    for (ModuleId moduleId : project.currentPackage().moduleIds()) {
        Module module = project.currentPackage().module(moduleId);
        boolean hasSources = !module.documentIds().isEmpty();
        boolean hasTests = !module.testDocumentIds().isEmpty();
        if (hasSources || hasTests) {
            return false;
        }
    }
    return true;
}
/**
 * Given a list of patterns in include field, find the directories and files in the package that match the patterns.
 *
 * @param patterns list of string patterns to be matched
 * @return the list of matching paths
 */
public static List<Path> getPathsMatchingIncludePatterns(List<String> patterns, Path packageRoot) {
    List<Path> allMatchingPaths = new ArrayList<>();
    for (String pattern : patterns) {
        // A leading '!' negates the pattern: it prunes previously matched paths.
        boolean negated = pattern.startsWith("!");
        if (negated) {
            removeNegatedIncludePaths(pattern.substring(1), allMatchingPaths);
        } else {
            addMatchingIncludePaths(pattern, allMatchingPaths, packageRoot);
        }
    }
    return allMatchingPaths;
}
// Removes from the accumulated matches every path that matches the negated pattern.
private static void removeNegatedIncludePaths(String pattern, List<Path> allMatchingPaths) {
    String combinedPattern = getGlobFormatPattern(pattern);
    List<Path> negatedPaths = filterPathStream(allMatchingPaths.stream(), combinedPattern);
    allMatchingPaths.removeAll(negatedPaths);
}
/**
 * Walks the package tree and appends (as package-root-relative paths) every path that
 * matches the include pattern and passes the structural checks in
 * {@link #isCorrectPatternPathMatch}.
 *
 * @param pattern raw include pattern (no leading '!')
 * @param allMatchingPaths accumulator of relative matching paths; appended in place
 * @param packageRoot root directory of the package being scanned
 */
private static void addMatchingIncludePaths(String pattern, List<Path> allMatchingPaths, Path packageRoot) {
    String combinedPattern = getGlobFormatPattern(pattern);
    // Files.walk must be closed; it holds open directory handles.
    try (Stream<Path> pathStream = Files.walk(packageRoot)) {
        List<Path> patternPaths = filterPathStream(pathStream, combinedPattern);
        for (Path absolutePath : patternPaths) {
            if (isCorrectPatternPathMatch(absolutePath, packageRoot, pattern)) {
                Path relativePath = packageRoot.relativize(absolutePath);
                allMatchingPaths.add(relativePath);
            }
        }
    } catch (IOException e) {
        throw new ProjectException("Failed to read files matching the include pattern '" + pattern + "': " +
                e.getMessage(), e);
    }
}
// Structural checks a glob hit must also pass before being included.
private static boolean isCorrectPatternPathMatch(Path absolutePath, Path packageRoot, String pattern) {
    Path relativePath = packageRoot.relativize(absolutePath);
    // Never include build output.
    if (relativePath.startsWith(TARGET_DIR_NAME)) {
        return false;
    }
    // A leading '/' anchors the pattern to direct children of the package root.
    if (pattern.startsWith("/") && !packageRoot.equals(absolutePath.getParent())) {
        return false;
    }
    // A trailing '/' restricts the pattern to directories.
    return !(pattern.endsWith("/") && absolutePath.toFile().isFile());
}
// Keeps only the paths matching the given glob (evaluated on the default filesystem).
private static List<Path> filterPathStream(Stream<Path> pathStream, String combinedPattern) {
    var matcher = FileSystems.getDefault().getPathMatcher("glob:" + combinedPattern);
    return pathStream.filter(matcher::matches).collect(Collectors.toList());
}
// Converts a raw include pattern into glob syntax: prefix it so it matches at any depth
// (or at the root when anchored) and strip trailing slashes.
private static String getGlobFormatPattern(String pattern) {
    return getPatternPrefix(pattern) + removeTrailingSlashes(pattern);
}
// Root-anchored patterns already carry their own '/' separator, so they only need "**".
private static String getPatternPrefix(String pattern) {
    return pattern.startsWith("/") ? "**" : "**/";
}
// Strips every trailing '/' from the pattern (a pure-slash pattern becomes empty).
private static String removeTrailingSlashes(String pattern) {
    int end = pattern.length();
    while (end > 0 && pattern.charAt(end - 1) == '/') {
        end--;
    }
    return pattern.substring(0, end);
}
/**
 * Return the path of a bala with the available platform directory (java11 or any).
 *
 * @param balaDirPath path to the bala directory
 * @param org org name of the bala
 * @param name package name of the bala
 * @param version version of the bala
 * @return path of the bala file
 */
public static Path getPackagePath(Path balaDirPath, String org, String name, String version) {
    // Prefer the platform-generic ("any") layout; fall back to the java11-specific one.
    Path anyPlatformPath = balaDirPath.resolve(getRelativeBalaPath(org, name, version, null));
    if (Files.exists(anyPlatformPath)) {
        return anyPlatformPath;
    }
    return balaDirPath.resolve(getRelativeBalaPath(org, name, version, JvmTarget.JAVA_11.code()));
}
/**
 * Writes a module's sources, test sources and resources under the given modules root.
 *
 * @param moduleConfig module to write
 * @param modulesRoot directory that contains all module directories
 * @throws IOException when a directory or file cannot be written
 */
public static void writeModule(ModuleConfig moduleConfig, Path modulesRoot) throws IOException {
    Path moduleDirPath = modulesRoot.resolve(moduleConfig.moduleDescriptor().name().moduleNamePart());
    Files.createDirectories(moduleDirPath);
    for (DocumentConfig sourceDoc : moduleConfig.sourceDocs()) {
        Files.writeString(moduleDirPath.resolve(sourceDoc.name()), sourceDoc.content());
    }
    Path moduleTestDirPath = moduleDirPath.resolve(ProjectConstants.TEST_DIR_NAME);
    Files.createDirectories(moduleTestDirPath);
    for (DocumentConfig testSourceDoc : moduleConfig.testSourceDocs()) {
        Files.writeString(moduleTestDirPath.resolve(testSourceDoc.name()), testSourceDoc.content());
    }
    Path moduleResourcesDirPath = moduleDirPath.resolve(ProjectConstants.RESOURCE_DIR_NAME);
    // Bug fix: the original re-created the test directory here instead of the resources
    // directory, so writes below could fail when the resources directory was absent.
    Files.createDirectories(moduleResourcesDirPath);
    for (ResourceConfig resource : moduleConfig.resources()) {
        // Bug fix: the original passed content().orElse(null) straight to Files.write,
        // which throws a NullPointerException for a resource without content.
        byte[] content = resource.content().orElse(null);
        if (content != null) {
            Files.write(moduleResourcesDirPath.resolve(resource.name()), content);
        }
    }
}
/**
 * Loads a generated module from disk and assembles a {@link ModuleConfig} for it,
 * including its source and test documents.
 *
 * @param moduleName name of the generated module (directory name under generated/)
 * @param project project that owns the module
 * @return module configuration for the loaded module
 */
public static ModuleConfig createModuleConfig (String moduleName, Project project) {
    // Read the module's documents from <source-root>/generated/<moduleName>.
    ModuleData moduleData = ProjectFiles.loadModule(
            project.sourceRoot().resolve(ProjectConstants.GENERATED_MODULES_ROOT).resolve(moduleName));
    ModuleId moduleId = ModuleId.create(moduleName, project.currentPackage().packageId());
    List<DocumentConfig> documentConfigs = new ArrayList<>();
    List<DocumentConfig> testDocumentConfigs = new ArrayList<>();
    for (DocumentData sourceDoc : moduleData.sourceDocs()) {
        DocumentId documentId = DocumentId.create(sourceDoc.name(), moduleId);
        documentConfigs.add(DocumentConfig.from(documentId, sourceDoc.content(), sourceDoc.name()));
    }
    for (DocumentData sourceDoc : moduleData.testSourceDocs()) {
        DocumentId documentId = DocumentId.create(sourceDoc.name(), moduleId);
        testDocumentConfigs.add(DocumentConfig.from(documentId, sourceDoc.content(), sourceDoc.name()));
    }
    ModuleDescriptor moduleDescriptor = ModuleDescriptor.from(
            ModuleName.from(project.currentPackage().packageName(), moduleName),
            project.currentPackage().descriptor());
    // No resources and an empty dependency list for generated modules.
    return ModuleConfig.from(
            moduleId, moduleDescriptor, documentConfigs, testDocumentConfigs, null, new ArrayList<>());
}
} |
The SDK harness issues concurrent state access for different keys? | public Iterable<V> get(K key, W window) {
synchronized (keyedStateBackend) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
return bagState.read();
}
} | synchronized (keyedStateBackend) { | public Iterable<V> get(K key, W window) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
return bagState.read();
} | class BagUserStateFactory
implements StateRequestHandlers.BagUserStateHandlerFactory {
private final StateInternals stateInternals;
private final KeyedStateBackend<ByteBuffer> keyedStateBackend;
// Captures the Beam state internals and the Flink keyed state backend that
// service bag user-state requests from the SDK harness.
private BagUserStateFactory(
StateInternals stateInternals, KeyedStateBackend<ByteBuffer> keyedStateBackend) {
this.stateInternals = stateInternals;
this.keyedStateBackend = keyedStateBackend;
}
// Creates a handler for one user-state cell, backed by Flink keyed state.
// Fix: the original carried a duplicated @Override annotation on append(),
// which is illegal Java (@Override is not repeatable).
@Override
public <K, V, W extends BoundedWindow>
StateRequestHandlers.BagUserStateHandler<K, V, W> forUserState(
String pTransformId,
String userStateId,
Coder<K> keyCoder,
Coder<V> valueCoder,
Coder<W> windowCoder) {
return new StateRequestHandlers.BagUserStateHandler<K, V, W>() {
@Override
public void append(K key, W window, Iterator<V> values) {
// Lock the backend: setCurrentKey mutates shared backend state, and the
// SDK harness may issue concurrent state requests for different keys.
synchronized (keyedStateBackend) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
while (values.hasNext()) {
bagState.add(values.next());
}
}
}
@Override
public void clear(K key, W window) {
synchronized (keyedStateBackend) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
bagState.clear();
}
}
// Points the Flink keyed state backend at the coder-encoded key so that
// subsequent state accesses operate on this key's state partition.
private void prepareStateBackend(K key, Coder<K> keyCoder) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
keyCoder.encode(key, baos);
} catch (IOException e) {
throw new RuntimeException("Failed to encode key for Flink state backend", e);
}
keyedStateBackend.setCurrentKey(ByteBuffer.wrap(baos.toByteArray()));
}
};
}
} | class BagUserStateFactory
implements StateRequestHandlers.BagUserStateHandlerFactory {
private final StateInternals stateInternals;
private final KeyedStateBackend<ByteBuffer> keyedStateBackend;
// Captures the Beam state internals and the Flink keyed state backend that
// service bag user-state requests from the SDK harness.
private BagUserStateFactory(
StateInternals stateInternals, KeyedStateBackend<ByteBuffer> keyedStateBackend) {
this.stateInternals = stateInternals;
this.keyedStateBackend = keyedStateBackend;
}
// Creates a handler for one user-state cell, backed by Flink keyed state.
// Fix: the original carried a duplicated @Override annotation on append(),
// which is illegal Java (@Override is not repeatable).
@Override
public <K, V, W extends BoundedWindow>
StateRequestHandlers.BagUserStateHandler<K, V, W> forUserState(
String pTransformId,
String userStateId,
Coder<K> keyCoder,
Coder<V> valueCoder,
Coder<W> windowCoder) {
return new StateRequestHandlers.BagUserStateHandler<K, V, W>() {
@Override
public void append(K key, W window, Iterator<V> values) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
while (values.hasNext()) {
bagState.add(values.next());
}
}
@Override
public void clear(K key, W window) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
bagState.clear();
}
// Points the Flink keyed state backend at the coder-encoded key so that
// subsequent state accesses operate on this key's state partition.
private void prepareStateBackend(K key, Coder<K> keyCoder) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
keyCoder.encode(key, baos);
} catch (IOException e) {
throw new RuntimeException("Failed to encode key for Flink state backend", e);
}
keyedStateBackend.setCurrentKey(ByteBuffer.wrap(baos.toByteArray()));
}
};
}
} |
Can use `try (URLClassLoader classLoader = URLClassLoader.newInstance(urls))` | private void analyzeUdfClassInStarrocksJar() throws AnalysisException {
String class_name = properties.get(SYMBOL_KEY);
if (Strings.isNullOrEmpty(class_name)) {
throw new AnalysisException("No '" + SYMBOL_KEY + "' in properties");
}
URLClassLoader classLoader = null;
try {
URL[] urls = {new URL("jar:" + objectFile + "!/")};
classLoader = URLClassLoader.newInstance(urls);
udfClass.setClazz(classLoader.loadClass(class_name));
if (isAggregate) {
String state_class_name = class_name + "$" + STATE_CLASS_NAME;
udfStateClass.setClazz(classLoader.loadClass(state_class_name));
}
udfClass.collectMethods();
if (isAggregate) {
udfStateClass.collectMethods();
}
} catch (MalformedURLException e) {
throw new AnalysisException("Failed to load object_file: " + objectFile);
} catch (ClassNotFoundException e) {
throw new AnalysisException("Class '" + class_name + "' not found in object_file :" + objectFile);
} finally {
if (classLoader != null) {
try {
classLoader.close();
} catch (IOException e) {
}
}
}
} | classLoader = URLClassLoader.newInstance(urls); | private void analyzeUdfClassInStarrocksJar() throws AnalysisException {
String class_name = properties.get(SYMBOL_KEY);
if (Strings.isNullOrEmpty(class_name)) {
throw new AnalysisException("No '" + SYMBOL_KEY + "' in properties");
}
try {
URL[] urls = {new URL("jar:" + objectFile + "!/")};
try (URLClassLoader classLoader = URLClassLoader.newInstance(urls)) {
mainClass.setClazz(classLoader.loadClass(class_name));
if (isAggregate) {
String state_class_name = class_name + "$" + STATE_CLASS_NAME;
udafStateClass.setClazz(classLoader.loadClass(state_class_name));
}
} catch (IOException e) {
throw new AnalysisException("Failed to load object_file: " + objectFile);
} catch (ClassNotFoundException e) {
throw new AnalysisException("Class '" + class_name + "' not found in object_file :" + objectFile);
}
} catch (MalformedURLException e) {
throw new AnalysisException("Object file is invalid: " + objectFile);
}
mainClass.collectMethods();
if (isAggregate) {
udafStateClass.collectMethods();
}
} | class '%s' method '%s' parameter %s[%s] type does not match %s",
clazz.getCanonicalName(), method.getName(), pname, ptype.getCanonicalName(),
cls.getCanonicalName()));
}
}
}
private UDFInternalClass udfClass;
private UDFInternalClass udfStateClass;
// Creates a CREATE FUNCTION statement. Properties are stored case-insensitively;
// the aggregate state holder is only allocated for aggregate (UDAF) functions.
public CreateFunctionStmt(boolean isAggregate, FunctionName functionName, FunctionArgsDef argsDef,
TypeDef returnType, TypeDef intermediateType, Map<String, String> properties) {
this.functionName = functionName;
this.isAggregate = isAggregate;
this.argsDef = argsDef;
this.returnType = returnType;
this.intermediateType = intermediateType;
if (properties == null) {
// Normalize a missing property map to an empty one so lookups never NPE.
this.properties = ImmutableSortedMap.of();
} else {
this.properties = ImmutableSortedMap.copyOf(properties, String.CASE_INSENSITIVE_ORDER);
}
this.udfClass = new UDFInternalClass();
if (isAggregate) {
this.udfStateClass = new UDFInternalClass();
}
}
clazz.getCanonicalName(), method.getName(), pname, ptype.getCanonicalName(),
cls.getCanonicalName()));
}
}
}
private UDFInternalClass mainClass;
private UDFInternalClass udafStateClass;
// Creates a CREATE FUNCTION statement. Properties are stored case-insensitively;
// the aggregate state holder is only allocated for aggregate (UDAF) functions.
public CreateFunctionStmt(boolean isAggregate, FunctionName functionName, FunctionArgsDef argsDef,
TypeDef returnType, TypeDef intermediateType, Map<String, String> properties) {
this.functionName = functionName;
this.isAggregate = isAggregate;
this.argsDef = argsDef;
this.returnType = returnType;
this.intermediateType = intermediateType;
if (properties == null) {
// Normalize a missing property map to an empty one so lookups never NPE.
this.properties = ImmutableSortedMap.of();
} else {
this.properties = ImmutableSortedMap.copyOf(properties, String.CASE_INSENSITIVE_ORDER);
}
this.mainClass = new UDFInternalClass();
if (isAggregate) {
this.udafStateClass = new UDFInternalClass();
}
}
nit: We should not strictly cast it to ArrayList<String> here, instead keep it open to generic `List<String>` here. ``` List<String> expandParam = ((Collection<?>) parameters[paramIndex]).stream().map(Object::toString).collect(Collectors.toList()); ``` | public Object execute(final Object[] parameters) {
final CosmosParameterAccessor accessor = new CosmosParameterParameterAccessor(getQueryMethod(), parameters);
final ResultProcessor processor = getQueryMethod().getResultProcessor().withDynamicProjection(accessor);
String expandedQuery = query;
List<SqlParameter> sqlParameters = new ArrayList<>();
for (int paramIndex = 0; paramIndex < parameters.length; paramIndex++) {
Parameter queryParam = getQueryMethod().getParameters().getParameter(paramIndex);
if (parameters[paramIndex] instanceof Collection) {
ArrayList<String> expandParam = (ArrayList<String>) ((Collection<?>) parameters[paramIndex]).stream()
.map(Object::toString).collect(Collectors.toList());
List<String> expandedParamKeys = new ArrayList<>();
for (int arrayIndex = 0; arrayIndex < expandParam.size(); arrayIndex++) {
String paramName = "@" + queryParam.getName().orElse("") + arrayIndex;
expandedParamKeys.add(paramName);
sqlParameters.add(new SqlParameter(paramName, toCosmosDbValue(expandParam.get(arrayIndex))));
}
expandedQuery = expandedQuery.replaceAll("@" + queryParam.getName().orElse(""), String.join(",", expandedParamKeys));
} else {
if (!Pageable.class.isAssignableFrom(queryParam.getType())
&& !Sort.class.isAssignableFrom(queryParam.getType())) {
sqlParameters.add(new SqlParameter("@" + queryParam.getName().orElse(""), toCosmosDbValue(parameters[paramIndex])));
}
}
}
SqlQuerySpec querySpec = new SqlQuerySpec(expandedQuery, sqlParameters);
if (isPageQuery()) {
return this.operations.runPaginationQuery(querySpec, accessor.getPageable(), processor.getReturnedType().getDomainType(),
processor.getReturnedType().getReturnedType());
} else if (isSliceQuery()) {
return this.operations.runSliceQuery(
querySpec,
accessor.getPageable(),
processor.getReturnedType().getDomainType(),
processor.getReturnedType().getReturnedType());
} else if (isCountQuery()) {
final String container = ((CosmosEntityMetadata<?>) getQueryMethod().getEntityInformation()).getContainerName();
return this.operations.count(querySpec, container);
} else {
return this.operations.runQuery(querySpec, accessor.getSort(), processor.getReturnedType().getDomainType(),
processor.getReturnedType().getReturnedType());
}
} | ArrayList<String> expandParam = (ArrayList<String>) ((Collection<?>) parameters[paramIndex]).stream() | public Object execute(final Object[] parameters) {
final CosmosParameterAccessor accessor = new CosmosParameterParameterAccessor(getQueryMethod(), parameters);
final ResultProcessor processor = getQueryMethod().getResultProcessor().withDynamicProjection(accessor);
String expandedQuery = query;
List<SqlParameter> sqlParameters = new ArrayList<>();
for (int paramIndex = 0; paramIndex < parameters.length; paramIndex++) {
Parameter queryParam = getQueryMethod().getParameters().getParameter(paramIndex);
if (parameters[paramIndex] instanceof Collection) {
List<String> expandParam = ((Collection<?>) parameters[paramIndex]).stream()
.map(Object::toString).collect(Collectors.toList());
List<String> expandedParamKeys = new ArrayList<>();
for (int arrayIndex = 0; arrayIndex < expandParam.size(); arrayIndex++) {
String paramName = "@" + queryParam.getName().orElse("") + arrayIndex;
expandedParamKeys.add(paramName);
sqlParameters.add(new SqlParameter(paramName, toCosmosDbValue(expandParam.get(arrayIndex))));
}
expandedQuery = expandedQuery.replaceAll("@" + queryParam.getName().orElse(""), String.join(",", expandedParamKeys));
} else {
if (!Pageable.class.isAssignableFrom(queryParam.getType())
&& !Sort.class.isAssignableFrom(queryParam.getType())) {
sqlParameters.add(new SqlParameter("@" + queryParam.getName().orElse(""), toCosmosDbValue(parameters[paramIndex])));
}
}
}
SqlQuerySpec querySpec = new SqlQuerySpec(expandedQuery, sqlParameters);
if (isPageQuery()) {
return this.operations.runPaginationQuery(querySpec, accessor.getPageable(), processor.getReturnedType().getDomainType(),
processor.getReturnedType().getReturnedType());
} else if (isSliceQuery()) {
return this.operations.runSliceQuery(
querySpec,
accessor.getPageable(),
processor.getReturnedType().getDomainType(),
processor.getReturnedType().getReturnedType());
} else if (isCountQuery()) {
final String container = ((CosmosEntityMetadata<?>) getQueryMethod().getEntityInformation()).getContainerName();
return this.operations.count(querySpec, container);
} else {
return this.operations.runQuery(querySpec, accessor.getSort(), processor.getReturnedType().getDomainType(),
processor.getReturnedType().getReturnedType());
}
} | class StringBasedCosmosQuery extends AbstractCosmosQuery {
private static final Pattern COUNT_QUERY_PATTERN = Pattern.compile("^\\s*select\\s+value\\s+count.*", Pattern.CASE_INSENSITIVE);
private final String query;
/**
* Constructor
* @param queryMethod the CosmosQueryMethod
* @param dbOperations the CosmosOperations
*/
public StringBasedCosmosQuery(CosmosQueryMethod queryMethod, CosmosOperations dbOperations) {
super(queryMethod, dbOperations);
// The raw query text comes from the repository method's @Query annotation.
this.query = queryMethod.getQueryAnnotation();
}
// Not used for string-based queries: the annotated query string is executed
// directly instead of a derived CosmosQuery tree.
@Override
protected CosmosQuery createQuery(CosmosParameterAccessor accessor) {
return null;
}
// Fix: removed a duplicated @Override annotation (@Override is not repeatable,
// so two copies on one method do not compile).
@Override
protected boolean isDeleteQuery() {
// String-based query methods are never treated as delete operations.
return false;
}
// String-based query methods are never treated as existence checks.
@Override
protected boolean isExistsQuery() {
return false;
}
// Delegates to the static helper using this query's text and the method's
// declared return type.
@Override
protected boolean isCountQuery() {
return isCountQuery(query, getQueryMethod().getReturnedObjectType());
}
/**
 * A query counts documents when its return type is integral (boxed or primitive
 * int/long) and its text matches the {@code SELECT VALUE COUNT ...} pattern.
 */
static boolean isCountQuery(String query, Class<?> returnedType) {
    return isCountQueryReturnType(returnedType)
            && COUNT_QUERY_PATTERN.matcher(query).matches();
}
// Count projections may bind to either the boxed or the primitive integral types.
private static boolean isCountQueryReturnType(Class<?> returnedType) {
    return returnedType == Long.class || returnedType == long.class
            || returnedType == Integer.class || returnedType == int.class;
}
} | class StringBasedCosmosQuery extends AbstractCosmosQuery {
private static final Pattern COUNT_QUERY_PATTERN = Pattern.compile("^\\s*select\\s+value\\s+count.*", Pattern.CASE_INSENSITIVE);
private final String query;
/**
* Constructor
* @param queryMethod the CosmosQueryMethod
* @param dbOperations the CosmosOperations
*/
public StringBasedCosmosQuery(CosmosQueryMethod queryMethod, CosmosOperations dbOperations) {
super(queryMethod, dbOperations);
// The raw query text comes from the repository method's @Query annotation.
this.query = queryMethod.getQueryAnnotation();
}
// Not used for string-based queries: the annotated query string is executed
// directly instead of a derived CosmosQuery tree.
@Override
protected CosmosQuery createQuery(CosmosParameterAccessor accessor) {
return null;
}
// Fix: removed a duplicated @Override annotation (@Override is not repeatable,
// so two copies on one method do not compile).
@Override
protected boolean isDeleteQuery() {
// String-based query methods are never treated as delete operations.
return false;
}
// String-based query methods are never treated as existence checks.
@Override
protected boolean isExistsQuery() {
return false;
}
// Delegates to the static helper using this query's text and the method's
// declared return type.
@Override
protected boolean isCountQuery() {
return isCountQuery(query, getQueryMethod().getReturnedObjectType());
}
/**
 * A query counts documents when its return type is integral (boxed or primitive
 * int/long) and its text matches the {@code SELECT VALUE COUNT ...} pattern.
 */
static boolean isCountQuery(String query, Class<?> returnedType) {
    return isCountQueryReturnType(returnedType)
            && COUNT_QUERY_PATTERN.matcher(query).matches();
}
// Count projections may bind to either the boxed or the primitive integral types.
private static boolean isCountQueryReturnType(Class<?> returnedType) {
    return returnedType == Long.class || returnedType == long.class
            || returnedType == Integer.class || returnedType == int.class;
}
} |
This causes a breaking change with #30678 due to being out of sync. | public BLangNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) {
BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
Optional<TypeParameterNode> typeParam = errorTypeDescriptorNode.errorTypeParamsNode();
errorType.pos = getPosition(errorTypeDescriptorNode);
if (typeParam.isPresent()) {
TypeParameterNode typeNode = typeParam.get();
errorType.detailType = createTypeNode(typeNode);
}
NonTerminalNode parent = errorTypeDescriptorNode.parent();
boolean isDistinctError = parent.kind() == SyntaxKind.DISTINCT_TYPE_DESC;
if (isDistinctError) {
parent = parent.parent();
}
errorType.isAnonymous = checkIfAnonymous(errorTypeDescriptorNode);
errorType.isLocal = this.isInLocalContext;
if (parent.kind() != SyntaxKind.TYPE_DEFINITION
&& (isDistinctError || (!errorType.isLocal && typeParam.isPresent()))) {
return deSugarTypeAsUserDefType(errorType);
}
return errorType;
} | errorType.isAnonymous = checkIfAnonymous(errorTypeDescriptorNode); | public BLangNode transform(ErrorTypeDescriptorNode errorTypeDescriptorNode) {
BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
Optional<TypeParameterNode> typeParam = errorTypeDescriptorNode.errorTypeParamsNode();
errorType.pos = getPosition(errorTypeDescriptorNode);
if (typeParam.isPresent()) {
TypeParameterNode typeNode = typeParam.get();
errorType.detailType = createTypeNode(typeNode);
}
NonTerminalNode parent = errorTypeDescriptorNode.parent();
boolean isDistinctError = parent.kind() == SyntaxKind.DISTINCT_TYPE_DESC;
if (isDistinctError) {
parent = parent.parent();
}
errorType.isAnonymous = checkIfAnonymous(errorTypeDescriptorNode);
errorType.isLocal = this.isInLocalContext;
if (parent.kind() != SyntaxKind.TYPE_DEFINITION
&& (isDistinctError || (!errorType.isLocal && typeParam.isPresent()))) {
return deSugarTypeAsUserDefType(errorType);
}
return errorType;
} | class BLangNodeTransformer extends NodeTransformer<BLangNode> {
private static final String IDENTIFIER_LITERAL_PREFIX = "'";
private BLangDiagnosticLog dlog;
private SymbolTable symTable;
private PackageCache packageCache;
private PackageID packageID;
private String currentCompUnitName;
private BLangCompilationUnit currentCompilationUnit;
private BLangAnonymousModelHelper anonymousModelHelper;
private BLangMissingNodesHelper missingNodesHelper;
/* To keep track of additional statements produced from multi-BLangNode resultant transformations */
private Stack<BLangStatement> additionalStatements = new Stack<>();
/* To keep track if we are inside a block statment for the use of type definition creation */
private boolean isInLocalContext = false;
/**
 * Creates a transformer for one compilation unit of the given package.
 *
 * @param context   compiler context used to look up shared singletons
 * @param packageID package the compilation unit belongs to
 * @param entryName name of the compilation unit being transformed
 */
public BLangNodeTransformer(CompilerContext context,
PackageID packageID, String entryName) {
this.dlog = BLangDiagnosticLog.getInstance(context);
// Diagnostics raised by this transformer are reported against this package.
this.dlog.setCurrentPackageId(packageID);
this.symTable = SymbolTable.getInstance(context);
this.packageID = packageID;
this.currentCompUnitName = entryName;
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
}
/**
 * Transforms the given syntax node and returns the resulting AST nodes.
 * Additional statements accumulated during the transformation are drained
 * first (in pop order), followed by the node's own transformation result.
 */
public List<org.ballerinalang.model.tree.Node> accept(Node node) {
    BLangNode transformed = node.apply(this);
    List<org.ballerinalang.model.tree.Node> result = new ArrayList<>();
    for (; !additionalStatements.empty(); ) {
        result.add(additionalStatements.pop());
    }
    result.add(transformed);
    return result;
}
@Override
public BLangNode transform(IdentifierToken identifierToken) {
    // An identifier token maps directly to an identifier node at its source position.
    Location tokenPos = getPosition(identifierToken);
    return createIdentifier(tokenPos, identifierToken);
}
// Unwraps the documentation string from optional metadata; null when absent.
private Optional<Node> getDocumentationString(Optional<MetadataNode> metadataNode) {
    return metadataNode.map(metadata -> metadata.documentationString()).orElse(null);
}
// Unwraps the annotation list from optional metadata; null when absent.
private NodeList<AnnotationNode> getAnnotations(Optional<MetadataNode> metadataNode) {
    return metadataNode.map(metadata -> metadata.annotations()).orElse(null);
}
// Converts a syntax node's line range into a diagnostic location within the
// current compilation unit; null-safe for missing nodes.
private Location getPosition(Node node) {
    if (node == null) {
        return null;
    }
    LineRange range = node.lineRange();
    LinePosition start = range.startLine();
    LinePosition end = range.endLine();
    return new BLangDiagnosticLocation(
            currentCompUnitName, start.line(), end.line(), start.offset(), end.offset());
}
// Builds a diagnostic location spanning from startNode's first line to
// endNode's last line; null if either endpoint is missing.
private Location getPosition(Node startNode, Node endNode) {
    if (startNode == null || endNode == null) {
        return null;
    }
    LinePosition start = startNode.lineRange().startLine();
    LinePosition end = endNode.lineRange().endLine();
    return new BLangDiagnosticLocation(
            currentCompUnitName, start.line(), end.line(), start.offset(), end.offset());
}
// Like getPosition, but when the node's first child is METADATA the range
// starts at the second child, so annotations/doc comments are excluded.
private Location getPositionWithoutMetadata(Node node) {
    if (node == null) {
        return null;
    }
    LineRange nodeRange = node.lineRange();
    ChildNodeList children = ((NonTerminalNode) node).children();
    boolean leadsWithMetadata = children.get(0).kind() == SyntaxKind.METADATA;
    LinePosition startPos = leadsWithMetadata
            ? children.get(1).lineRange().startLine()
            : nodeRange.startLine();
    LinePosition endPos = nodeRange.endLine();
    return new BLangDiagnosticLocation(currentCompUnitName,
            startPos.line(), endPos.line(), startPos.offset(), endPos.offset());
}
/**
 * Transforms a module part into a BLangCompilationUnit: imports are attached
 * first, then module members; the unit itself gets a zeroed location.
 */
@Override
public BLangNode transform(ModulePartNode modulePart) {
BLangCompilationUnit compilationUnit = (BLangCompilationUnit) TreeBuilder.createCompilationUnit();
// Track the unit being built so nested transforms can reference it.
this.currentCompilationUnit = compilationUnit;
compilationUnit.name = currentCompUnitName;
compilationUnit.setPackageID(packageID);
Location pos = getPosition(modulePart);
for (ImportDeclarationNode importDecl : modulePart.imports()) {
BLangImportPackage bLangImport = (BLangImportPackage) importDecl.apply(this);
bLangImport.compUnit = this.createIdentifier(pos, compilationUnit.getName());
compilationUnit.addTopLevelNode(bLangImport);
}
for (ModuleMemberDeclarationNode member : modulePart.members()) {
compilationUnit.addTopLevelNode((TopLevelNode) member.apply(this));
}
// The unit spans the whole file; only the file path is kept, lines/columns zeroed.
Location newLocation = new BLangDiagnosticLocation(pos.lineRange().filePath(), 0, 0, 0, 0);
compilationUnit.pos = newLocation;
compilationUnit.setPackageID(packageID);
this.currentCompilationUnit = null;
return compilationUnit;
}
/**
 * Transforms a module-level variable declaration into a BLangVariable,
 * applying visibility, qualifiers, annotations and documentation.
 */
@Override
public BLangNode transform(ModuleVariableDeclarationNode modVarDeclrNode) {
TypedBindingPatternNode typedBindingPattern = modVarDeclrNode.typedBindingPattern();
BindingPatternNode bindingPatternNode = typedBindingPattern.bindingPattern();
BLangVariable variable = getBLangVariableNode(bindingPatternNode);
// A present visibility qualifier at module level means the variable is public.
if (modVarDeclrNode.visibilityQualifier().isPresent()) {
markVariableWithFlag(variable, Flag.PUBLIC);
}
initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), modVarDeclrNode.initializer(),
modVarDeclrNode.qualifiers());
NodeList<AnnotationNode> annotations = getAnnotations(modVarDeclrNode.metadata());
if (annotations != null) {
variable.annAttachments = applyAll(annotations);
}
// Position excludes leading metadata (annotations/doc comments).
variable.pos = getPositionWithoutMetadata(modVarDeclrNode);
variable.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(modVarDeclrNode.metadata()));
return variable;
}
/**
 * Transforms an import declaration into a BLangImportPackage, capturing the
 * optional org name and prefix. When no prefix is given, the last module name
 * component is used as the alias.
 */
@Override
public BLangNode transform(ImportDeclarationNode importDeclaration) {
ImportOrgNameNode orgNameNode = importDeclaration.orgName().orElse(null);
Optional<ImportPrefixNode> prefixNode = importDeclaration.prefix();
Token prefix = prefixNode.isPresent() ? prefixNode.get().prefix() : null;
Token orgName = null;
if (orgNameNode != null) {
orgName = orgNameNode.orgName();
}
// Version is not captured from the syntax node here; the identifier below is
// created with a null value.
String version = null;
List<BLangIdentifier> pkgNameComps = new ArrayList<>();
NodeList<IdentifierToken> names = importDeclaration.moduleName();
Location position = getPosition(importDeclaration);
names.forEach(name -> pkgNameComps.add(this.createIdentifier(getPosition(name), name.text(), null)));
BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
importDcl.pos = position;
importDcl.pkgNameComps = pkgNameComps;
importDcl.orgName = this.createIdentifier(getPosition(orgNameNode), orgName);
importDcl.version = this.createIdentifier(null, version);
importDcl.alias = (prefix != null) ? this.createIdentifier(getPosition(prefix), prefix)
: pkgNameComps.get(pkgNameComps.size() - 1);
return importDcl;
}
/**
 * Transforms a method declaration. Declarations with a relative resource path
 * become resource functions; all others become ordinary functions. Both have a
 * null body (declarations only).
 */
@Override
public BLangNode transform(MethodDeclarationNode methodDeclarationNode) {
BLangFunction bLFunction;
if (methodDeclarationNode.relativeResourcePath().isEmpty()) {
bLFunction = createFunctionNode(methodDeclarationNode.methodName(),
methodDeclarationNode.qualifierList(), methodDeclarationNode.methodSignature(), null);
} else {
bLFunction = createResourceFunctionNode(methodDeclarationNode.methodName(),
methodDeclarationNode.qualifierList(), methodDeclarationNode.relativeResourcePath(),
methodDeclarationNode.methodSignature(), null);
}
bLFunction.annAttachments = applyAll(getAnnotations(methodDeclarationNode.metadata()));
bLFunction.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(methodDeclarationNode.metadata()));
bLFunction.pos = getPositionWithoutMetadata(methodDeclarationNode);
return bLFunction;
}
/**
 * Transforms a resource path parameter into a simple variable. Rest path
 * parameters are typed as a one-dimensional array of the declared type.
 */
@Override
public BLangNode transform(ResourcePathParameterNode resourcePathParameterNode) {
BLangSimpleVariable pathParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
pathParam.name = createIdentifier(resourcePathParameterNode.paramName());
BLangType typeNode = (BLangType) resourcePathParameterNode.typeDescriptor().apply(this);
pathParam.pos = getPosition(resourcePathParameterNode);
pathParam.annAttachments = applyAll(resourcePathParameterNode.annotations());
if (resourcePathParameterNode.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) {
// A rest path param collects the remaining segments, hence the array wrapper.
BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
arrayTypeNode.elemtype = typeNode;
arrayTypeNode.dimensions = 1;
typeNode = arrayTypeNode;
}
pathParam.typeNode = typeNode;
return pathParam;
}
/**
 * Builds a BLangResourceFunction from an accessor and its relative resource
 * path. Literal path segments become identifiers; path parameters are recorded
 * as "*" and a rest path parameter as "**", and both are prepended to the
 * function's parameter list.
 */
private BLangFunction createResourceFunctionNode(IdentifierToken accessorName,
NodeList<Token> qualifierList,
NodeList<Node> relativeResourcePath,
FunctionSignatureNode methodSignature,
FunctionBodyNode functionBody) {
BLangResourceFunction bLFunction = (BLangResourceFunction) TreeBuilder.createResourceFunctionNode();
String resourceFuncName = calculateResourceFunctionName(accessorName, relativeResourcePath);
BLangIdentifier name = createIdentifier(getPosition(accessorName), resourceFuncName);
populateFunctionNode(name, qualifierList, methodSignature, functionBody, bLFunction);
bLFunction.methodName = createIdentifier(accessorName);
bLFunction.resourcePath = new ArrayList<>();
List<BLangSimpleVariable> params = new ArrayList<>();
for (Node pathSegment : relativeResourcePath) {
switch (pathSegment.kind()) {
case SLASH_TOKEN:
// Separators do not contribute to the path or parameters.
continue;
case RESOURCE_PATH_SEGMENT_PARAM:
BLangSimpleVariable param = (BLangSimpleVariable) pathSegment.apply(this);
params.add(param);
bLFunction.addPathParam(param);
bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "*"));
break;
case RESOURCE_PATH_REST_PARAM:
BLangSimpleVariable restParam = (BLangSimpleVariable) pathSegment.apply(this);
params.add(restParam);
bLFunction.setRestPathParam(restParam);
bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "**"));
break;
default:
bLFunction.resourcePath.add(createIdentifier((Token) pathSegment));
break;
}
}
// Path parameters precede the signature's declared parameters.
bLFunction.getParameters().addAll(0, params);
return bLFunction;
}
/**
 * Derives the mangled resource function name: "$" + accessor, followed by
 * "$*" per path parameter, "$**" for a rest path parameter, and "$" + segment
 * text for literal segments. Slashes are skipped.
 */
private String calculateResourceFunctionName(IdentifierToken accessorName, NodeList<Node> relativeResourcePath) {
    StringBuilder nameBuilder = new StringBuilder("$");
    nameBuilder.append(createIdentifier(accessorName).getValue());
    for (Node token : relativeResourcePath) {
        SyntaxKind segmentKind = token.kind();
        if (segmentKind == SyntaxKind.SLASH_TOKEN) {
            continue;
        }
        if (segmentKind == SyntaxKind.RESOURCE_PATH_SEGMENT_PARAM) {
            nameBuilder.append("$*");
        } else if (segmentKind == SyntaxKind.RESOURCE_PATH_REST_PARAM) {
            nameBuilder.append("$**");
        } else {
            nameBuilder.append("$");
            nameBuilder.append(createIdentifier((Token) token).getValue());
        }
    }
    return nameBuilder.toString();
}
/**
 * Transforms a constant declaration into a BLangConstant. For literal
 * initializers, an anonymous public finite type whose value space is the
 * literal is also created and associated with the constant.
 */
@Override
public BLangNode transform(ConstantDeclarationNode constantDeclarationNode) {
BLangConstant constantNode = (BLangConstant) TreeBuilder.createConstantNode();
Location pos = getPositionWithoutMetadata(constantDeclarationNode);
Location identifierPos = getPosition(constantDeclarationNode.variableName());
constantNode.name = createIdentifier(identifierPos, constantDeclarationNode.variableName());
constantNode.expr = createExpression(constantDeclarationNode.initializer());
constantNode.pos = pos;
if (constantDeclarationNode.typeDescriptor().isPresent()) {
constantNode.typeNode = createTypeNode(constantDeclarationNode.typeDescriptor().orElse(null));
}
constantNode.annAttachments = applyAll(getAnnotations(constantDeclarationNode.metadata()));
constantNode.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(constantDeclarationNode.metadata()));
constantNode.flagSet.add(Flag.CONSTANT);
if (constantDeclarationNode.visibilityQualifier().isPresent() &&
constantDeclarationNode.visibilityQualifier().orElse(null).kind() == SyntaxKind.PUBLIC_KEYWORD) {
constantNode.flagSet.add(Flag.PUBLIC);
}
// Literal and numeric-literal initializers get an associated anonymous
// finite type definition containing a copy of the literal.
NodeKind nodeKind = constantNode.expr.getKind();
if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
BLangLiteral literal = nodeKind == NodeKind.LITERAL ?
(BLangLiteral) TreeBuilder.createLiteralExpression() :
(BLangLiteral) TreeBuilder.createNumericLiteralExpression();
literal.setValue(((BLangLiteral) constantNode.expr).value);
literal.type = constantNode.expr.type;
literal.isConstant = true;
BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
finiteTypeNode.valueSpace.add(literal);
BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
IdentifierNode anonTypeGenName = createIdentifier(identifierPos, genName);
typeDef.setName(anonTypeGenName);
typeDef.flagSet.add(Flag.PUBLIC);
typeDef.flagSet.add(Flag.ANONYMOUS);
typeDef.typeNode = finiteTypeNode;
typeDef.pos = pos;
constantNode.associatedTypeDefinition = typeDef;
}
return constantNode;
}
/**
 * Transforms a type definition into a BLangTypeDefinition, applying
 * visibility, annotations and documentation.
 */
// Fix: added the missing @Override annotation for consistency with every other
// transform(...) override in this transformer.
@Override
public BLangNode transform(TypeDefinitionNode typeDefNode) {
BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
BLangIdentifier identifierNode =
this.createIdentifier(typeDefNode.typeName());
typeDef.setName(identifierNode);
typeDef.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(typeDefNode.metadata()));
typeDef.typeNode = createTypeNode(typeDefNode.typeDescriptor());
typeDefNode.visibilityQualifier().ifPresent(visibilityQual -> {
if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
typeDef.flagSet.add(Flag.PUBLIC);
}
});
// Position excludes leading metadata (annotations/doc comments).
typeDef.pos = getPositionWithoutMetadata(typeDefNode);
typeDef.annAttachments = applyAll(getAnnotations(typeDefNode.metadata()));
return typeDef;
}
/**
 * Transforms a union type descriptor. The union is first flattened; singleton
 * (finite) members are gathered into one finite type node, and the remaining
 * members form a BLangUnionTypeNode. If only singletons exist, the finite type
 * node is returned directly; otherwise it is hoisted to an anonymous type
 * definition and referenced as one union member.
 */
@Override
public BLangNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {
List<TypeDescriptorNode> nodes = flattenUnionType(unionTypeDescriptorNode);
List<TypeDescriptorNode> finiteTypeElements = new ArrayList<>();
List<List<TypeDescriptorNode>> unionTypeElementsCollection = new ArrayList<>();
for (TypeDescriptorNode type : nodes) {
if (type.kind() == SyntaxKind.SINGLETON_TYPE_DESC) {
finiteTypeElements.add(type);
// Start a new bucket so the relative ordering of non-singleton runs is kept.
unionTypeElementsCollection.add(new ArrayList<>());
} else {
List<TypeDescriptorNode> lastOfOthers;
if (unionTypeElementsCollection.isEmpty()) {
lastOfOthers = new ArrayList<>();
unionTypeElementsCollection.add(lastOfOthers);
} else {
lastOfOthers = unionTypeElementsCollection.get(unionTypeElementsCollection.size() - 1);
}
lastOfOthers.add(type);
}
}
List<TypeDescriptorNode> unionElements = new ArrayList<>();
reverseFlatMap(unionTypeElementsCollection, unionElements);
BLangFiniteTypeNode bLangFiniteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
for (TypeDescriptorNode finiteTypeEl : finiteTypeElements) {
SingletonTypeDescriptorNode singletonTypeNode = (SingletonTypeDescriptorNode) finiteTypeEl;
BLangLiteral literal = createSimpleLiteral(singletonTypeNode.simpleContExprNode(), true);
bLangFiniteTypeNode.addValue(literal);
}
if (unionElements.isEmpty()) {
return bLangFiniteTypeNode;
}
BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
unionTypeNode.pos = getPosition(unionTypeDescriptorNode);
for (TypeDescriptorNode unionElement : unionElements) {
unionTypeNode.memberTypeNodes.add(createTypeNode(unionElement));
}
if (!finiteTypeElements.isEmpty()) {
unionTypeNode.memberTypeNodes.add(deSugarTypeAsUserDefType(bLangFiniteTypeNode));
}
return unionTypeNode;
}
// Flattens a (possibly nested) union type descriptor into a flat member list.
private List<TypeDescriptorNode> flattenUnionType(UnionTypeDescriptorNode unionTypeDescriptorNode) {
    List<TypeDescriptorNode> members = new ArrayList<>();
    flattenUnionType(members, unionTypeDescriptorNode);
    return members;
}
// Recursive worker: non-union descriptors are appended directly; unions are
// split into their left and right halves and processed recursively.
private void flattenUnionType(List<TypeDescriptorNode> list, TypeDescriptorNode typeDescriptorNode) {
    if (typeDescriptorNode.kind() == SyntaxKind.UNION_TYPE_DESC) {
        UnionTypeDescriptorNode union = (UnionTypeDescriptorNode) typeDescriptorNode;
        updateListWithNonUnionTypes(list, union.leftTypeDesc());
        updateListWithNonUnionTypes(list, union.rightTypeDesc());
    } else {
        list.add(typeDescriptorNode);
    }
}
// Appends a non-union descriptor, or recurses into a nested union.
private void updateListWithNonUnionTypes(List<TypeDescriptorNode> list, TypeDescriptorNode typeDescNode) {
    if (typeDescNode.kind() == SyntaxKind.UNION_TYPE_DESC) {
        flattenUnionType(list, typeDescNode);
    } else {
        list.add(typeDescNode);
    }
}
// Appends the sub-lists to result in reverse order while preserving the
// element order within each sub-list.
private <T> void reverseFlatMap(List<List<T>> listOfLists, List<T> result) {
    int remaining = listOfLists.size();
    while (remaining > 0) {
        remaining--;
        result.addAll(listOfLists.get(remaining));
    }
}
/**
 * Hoists the given type node into a generated top-level type definition and returns
 * a user-defined type reference pointing at it.
 *
 * @param toIndirect the type node to hoist
 * @return a reference to the generated type definition
 */
private BLangUserDefinedType deSugarTypeAsUserDefType(BLangType toIndirect) {
    BLangTypeDefinition typeDef = createTypeDefinitionWithTypeNode(toIndirect);
    addToTop(typeDef);
    BLangIdentifier emptyPkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    return createUserDefinedType(toIndirect.pos, emptyPkgAlias, typeDef.name);
}
/**
 * Wraps the given type node in a public, anonymous type definition with a generated name,
 * positioned at the type node's own location.
 */
private BLangTypeDefinition createTypeDefinitionWithTypeNode(BLangType toIndirect) {
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    typeDef.setName(createIdentifier(symTable.builtinPos, genName));
    typeDef.flagSet.add(Flag.PUBLIC);
    typeDef.flagSet.add(Flag.ANONYMOUS);
    typeDef.typeNode = toIndirect;
    typeDef.pos = toIndirect.pos;
    return typeDef;
}
/**
 * Transforms a parenthesised type descriptor by transforming the inner type and
 * marking it as grouped.
 */
@Override
public BLangNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) {
    BLangType innerType = createTypeNode(parenthesisedTypeDescriptorNode.typedesc());
    innerType.grouped = true;
    return innerType;
}
/** Unwraps a type parameter into the type node it contains. */
@Override
public BLangNode transform(TypeParameterNode typeParameterNode) {
    return createTypeNode(typeParameterNode.typeNode());
}
/**
 * Transforms a tuple type descriptor into a {@link BLangTupleTypeNode}. A trailing
 * rest member becomes the tuple's rest parameter type; all other members are added
 * in order.
 */
@Override
public BLangNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) {
    BLangTupleTypeNode tupleTypeNode = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode();
    tupleTypeNode.pos = getPosition(tupleTypeDescriptorNode);
    SeparatedNodeList<Node> memberTypes = tupleTypeDescriptorNode.memberTypeDesc();
    for (int i = 0; i < memberTypes.size(); i++) {
        Node member = memberTypes.get(i);
        if (member.kind() == SyntaxKind.REST_TYPE) {
            tupleTypeNode.restParamType = createTypeNode(((RestDescriptorNode) member).typeDescriptor());
        } else {
            tupleTypeNode.memberTypeNodes.add(createTypeNode(member));
        }
    }
    return tupleTypeNode;
}
/**
 * Checks whether the type contained in a type-parameter is an anonymously defined
 * record, object, or error type descriptor.
 *
 * <p>Note: this is a private helper, so it must not carry {@code @Override} — a private
 * method cannot override a supertype method and the annotation is a compile error.
 *
 * @param typeNode the type parameter node to inspect
 * @return {@code true} if the contained type descriptor is anonymous
 */
private boolean isAnonymousTypeNode(TypeParameterNode typeNode) {
    SyntaxKind paramKind = typeNode.typeNode().kind();
    if (paramKind == SyntaxKind.RECORD_TYPE_DESC || paramKind == SyntaxKind.OBJECT_TYPE_DESC
            || paramKind == SyntaxKind.ERROR_TYPE_DESC) {
        return checkIfAnonymous(typeNode);
    }
    return false;
}
/**
 * Transforms a {@code distinct} type descriptor by transforming the underlying type
 * and tagging it with the DISTINCT flag.
 */
@Override
public BLangNode transform(DistinctTypeDescriptorNode distinctTypeDesc) {
    BLangType underlyingType = createTypeNode(distinctTypeDesc.typeDescriptor());
    underlyingType.flagSet.add(Flag.DISTINCT);
    return underlyingType;
}
/**
 * Transforms an object type descriptor into a {@link BLangObjectTypeNode}: maps the
 * object qualifiers onto flags, sorts the members into init function, methods, fields
 * and type references, trims the node position to the member span, and hoists an
 * anonymous object type into a generated type definition.
 *
 * @param objTypeDescNode the object type descriptor syntax node
 * @return the object type node, or a user-defined type reference when the type is anonymous
 */
@Override
public BLangNode transform(ObjectTypeDescriptorNode objTypeDescNode) {
    BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();

    // Map the client/service/isolated qualifiers onto the flag set; anything else is a bug.
    for (Token qualifier : objTypeDescNode.objectTypeQualifiers()) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            objectTypeNode.flagSet.add(Flag.CLIENT);
            continue;
        }
        if (kind == SyntaxKind.SERVICE_KEYWORD) {
            objectTypeNode.flagSet.add(SERVICE);
            continue;
        }
        if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            objectTypeNode.flagSet.add(ISOLATED);
            continue;
        }
        throw new RuntimeException("Syntax kind is not supported: " + kind);
    }

    NodeList<Node> members = objTypeDescNode.members();
    for (Node node : members) {
        // Visitor dispatch decides what kind of member this is.
        BLangNode bLangNode = node.apply(this);
        if (bLangNode.getKind() == NodeKind.FUNCTION) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                // Only the first init function becomes the object initializer;
                // any further init is added as an ordinary method.
                if (objectTypeNode.initFunction == null) {
                    bLangFunction.objInitFunction = true;
                    objectTypeNode.initFunction = bLangFunction;
                } else {
                    objectTypeNode.addFunction(bLangFunction);
                }
            } else {
                objectTypeNode.addFunction(bLangFunction);
            }
        } else if (bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            // Resource functions are invalid in an object type definition: keep the
            // node so later phases can still see it, but log the error here.
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            objectTypeNode.addFunction(bLangFunction);
            dlog.error(getPosition(node), DiagnosticErrorCode.OBJECT_TYPE_DEF_DOES_NOT_ALLOW_RESOURCE_FUNC_DECL);
        } else if (bLangNode.getKind() == NodeKind.VARIABLE) {
            objectTypeNode.addField((BLangSimpleVariable) bLangNode);
        } else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            objectTypeNode.addTypeReference((BLangType) bLangNode);
        }
    }

    objectTypeNode.pos = getPosition(objTypeDescNode);

    // Trim the position to the first/last member, or to the braces when there are none.
    if (members.size() > 0) {
        objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(members.get(0)));
        objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(members.get(members.size() - 1)));
    } else {
        objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(objTypeDescNode.closeBrace()));
        objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(objTypeDescNode.openBrace()));
    }

    boolean isAnonymous = checkIfAnonymous(objTypeDescNode);
    objectTypeNode.isAnonymous = isAnonymous;

    if (!isAnonymous) {
        return objectTypeNode;
    }

    // Anonymous object types are hoisted into a generated type definition and
    // referenced through a user-defined type.
    return deSugarTypeAsUserDefType(objectTypeNode);
}
/**
 * Builds an anonymous class definition from the member list of an object constructor
 * expression. Methods, fields, and the (parameterless) init function are attached to
 * the class; type-reference members are rejected.
 *
 * @param members the members of the object constructor body
 * @return the generated internal, anonymous class definition
 */
public BLangClassDefinition transformObjectCtorExpressionBody(NodeList<Node> members) {
    BLangClassDefinition classDefinition = (BLangClassDefinition) TreeBuilder.createClassDefNode();
    classDefinition.flagSet.add(Flag.ANONYMOUS);
    classDefinition.flagSet.add(Flag.OBJECT_CTOR);

    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        NodeKind nodeKind = bLangNode.getKind();
        if (nodeKind == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (!Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                // Ordinary method.
                classDefinition.addFunction(bLangFunction);
                continue;
            }
            // From here on this is an `init` function.
            if (classDefinition.initFunction != null) {
                // A second init is kept as an ordinary method.
                classDefinition.addFunction(bLangFunction);
                continue;
            }
            if (bLangFunction.requiredParams.size() != 0) {
                // Object-constructor init functions must not take parameters.
                dlog.error(bLangFunction.pos, DiagnosticErrorCode.OBJECT_CTOR_INIT_CANNOT_HAVE_PARAMETERS);
                continue;
            }
            bLangFunction.objInitFunction = true;
            classDefinition.initFunction = bLangFunction;
        } else if (nodeKind == NodeKind.VARIABLE) {
            classDefinition.addField((BLangSimpleVariable) bLangNode);
        } else if (nodeKind == NodeKind.USER_DEFINED_TYPE) {
            // Type-reference members are not supported inside an object constructor.
            dlog.error(bLangNode.pos, DiagnosticErrorCode.OBJECT_CTOR_DOES_NOT_SUPPORT_TYPE_REFERENCE_MEMBERS);
        }
    }
    classDefinition.internal = true;
    return classDefinition;
}
/**
* Object constructor expression creates a class definition for the type defined through the object constructor.
* Then add the class definition as a top level node. Using the class definition initialize the object defined in
* the object constructor. Therefore, this can be considered a form of desugaring.
* example:
* var objVariable = object { int n; };
*
* class anonType0 { int n; }
* var objVariable = new anonType0();
*
* @param objectConstructorExpressionNode object ctor expression node
* @return BLangTypeInit node which initialize the class definition
*/
@Override
public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) {
    Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode);
    // Desugar the ctor body into an anonymous class definition.
    BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members());
    anonClass.pos = pos;
    BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression();
    objectCtorExpression.pos = pos;
    objectCtorExpression.classNode = anonClass;

    // Give the anonymous class a generated name and hoist it to the top level.
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
    anonClass.setName(anonTypeGenName);
    anonClass.flagSet.add(Flag.PUBLIC);

    Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference();
    typeReference.ifPresent(typeReferenceNode -> {
        objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode));
    });

    anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations());
    addToTop(anonClass);

    // Propagate the ctor qualifiers (client/isolated/service) onto the generated class.
    NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers();
    for (Token qualifier : objectConstructorQualifierList) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            anonClass.flagSet.add(Flag.CLIENT);
            objectCtorExpression.isClient = true;
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            anonClass.flagSet.add(Flag.ISOLATED);
        } else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
            anonClass.flagSet.add(SERVICE);
            objectCtorExpression.isService = true;
        } else {
            throw new RuntimeException("Syntax kind is not supported: " + kind);
        }
    }

    // Build the `new <generated-class>()` initializer that the expression evaluates to.
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name);

    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = pos;
    initNode.userDefinedType = userDefinedType;

    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier pkgAlias = createIdentifier(pos, "");
    BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));

    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;

    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;

    objectCtorExpression.typeInit = initNode;
    return objectCtorExpression;
}
/**
 * Transforms an object field into a {@link BLangSimpleVariable}, carrying over its
 * visibility, documentation, and {@code final}/{@code resource} qualifiers.
 */
@Override
public BLangNode transform(ObjectFieldNode objFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(objFieldNode.fieldName(), objFieldNode.typeName(),
            objFieldNode.expression().orElse(null), objFieldNode.visibilityQualifier().orElse(null),
            getAnnotations(objFieldNode.metadata()));
    simpleVar.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(objFieldNode.metadata()));

    for (Token token : objFieldNode.qualifierList()) {
        SyntaxKind kind = token.kind();
        if (kind == SyntaxKind.FINAL_KEYWORD) {
            addFinalQualifier(simpleVar);
        } else if (kind == SyntaxKind.RESOURCE_KEYWORD) {
            addResourceQualifier(simpleVar);
        }
    }

    simpleVar.flagSet.add(Flag.FIELD);
    simpleVar.pos = getPositionWithoutMetadata(objFieldNode);
    return simpleVar;
}
/** Marks the given variable as a {@code resource} field. */
private void addResourceQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.RESOURCE);
}
/** Transforms an expression function body ({@code => expr}) into a {@link BLangExprFunctionBody}. */
@Override
public BLangNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) {
    BLangExprFunctionBody exprBody = (BLangExprFunctionBody) TreeBuilder.createExprFunctionBodyNode();
    exprBody.pos = getPosition(expressionFunctionBodyNode);
    exprBody.expr = createExpression(expressionFunctionBodyNode.expression());
    return exprBody;
}
/**
 * Transforms a record type descriptor into a {@link BLangRecordTypeNode}. Fields keep
 * their documentation; a rest descriptor or an open body delimiter makes the record
 * unsealed. Anonymous records outside a local context are hoisted via
 * {@code createAnonymousRecordType}.
 */
@Override
public BLangNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
    BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();

    for (Node field : recordTypeDescriptorNode.fields()) {
        switch (field.kind()) {
            case RECORD_FIELD: {
                BLangSimpleVariable recordField = (BLangSimpleVariable) field.apply(this);
                recordField.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(
                        getDocumentationString(((RecordFieldNode) field).metadata()));
                recordTypeNode.fields.add(recordField);
                break;
            }
            case RECORD_FIELD_WITH_DEFAULT_VALUE: {
                BLangSimpleVariable recordField = (BLangSimpleVariable) field.apply(this);
                recordField.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(
                        getDocumentationString(((RecordFieldWithDefaultValueNode) field).metadata()));
                recordTypeNode.fields.add(recordField);
                break;
            }
            default:
                // Any other member is a type reference.
                recordTypeNode.addTypeReference(createTypeNode(field));
        }
    }

    Optional<RecordRestDescriptorNode> recordRestDesc = recordTypeDescriptorNode.recordRestDescriptor();
    recordRestDesc.ifPresent(restDesc -> recordTypeNode.restFieldType = createTypeNode(restDesc));

    boolean isOpen = recordTypeDescriptorNode.bodyStartDelimiter().kind() == SyntaxKind.OPEN_BRACE_TOKEN;
    recordTypeNode.sealed = !recordRestDesc.isPresent() && !isOpen;
    recordTypeNode.pos = getPosition(recordTypeDescriptorNode);

    boolean isAnonymous = checkIfAnonymous(recordTypeDescriptorNode);
    recordTypeNode.isAnonymous = isAnonymous;
    recordTypeNode.isLocal = this.isInLocalContext;

    if (!isAnonymous || this.isInLocalContext) {
        return recordTypeNode;
    }
    return createAnonymousRecordType(recordTypeDescriptorNode, recordTypeNode);
}
/** Transforms a singleton type descriptor into a one-value finite type node. */
@Override
public BLangNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
    BLangLiteral literal = createSimpleLiteral(singletonTypeDescriptorNode.simpleContExprNode());
    BLangFiniteTypeNode finiteTypeNode = new BLangFiniteTypeNode();
    finiteTypeNode.pos = literal.pos;
    finiteTypeNode.valueSpace.add(literal);
    return finiteTypeNode;
}
/** Transforms a builtin simple name reference directly into its type node. */
@Override
public BLangNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameRefNode) {
    return createTypeNode(builtinSimpleNameRefNode);
}
/** Transforms a type reference member into the type node of the referenced name. */
@Override
public BLangNode transform(TypeReferenceNode typeReferenceNode) {
    return createTypeNode(typeReferenceNode.typeName());
}
/**
 * Transforms a record field into a {@link BLangSimpleVariable}. A trailing {@code ?}
 * makes the field OPTIONAL, otherwise it is REQUIRED.
 */
@Override
public BLangNode transform(RecordFieldNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    simpleVar.flagSet.add(recordFieldNode.questionMarkToken().isPresent() ? Flag.OPTIONAL : Flag.REQUIRED);
    simpleVar.flagSet.add(Flag.FIELD);
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}
/**
 * Transforms a record field that carries a default value into a
 * {@link BLangSimpleVariable} whose initial expression is the default.
 */
@Override
public BLangNode transform(RecordFieldWithDefaultValueNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    if (isPresent(recordFieldNode.expression())) {
        simpleVar.setInitialExpression(createExpression(recordFieldNode.expression()));
    }
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}
/** Adds the READONLY flag to the variable when a {@code readonly} keyword is present. */
private void addReadOnlyQualifier(Optional<Token> readonlyKeyword, BLangSimpleVariable simpleVar) {
    readonlyKeyword.ifPresent(keyword -> simpleVar.flagSet.add(Flag.READONLY));
}
/** Transforms a record rest descriptor into the type node of its rest type. */
@Override
public BLangNode transform(RecordRestDescriptorNode recordFieldNode) {
    return createTypeNode(recordFieldNode.typeName());
}
/**
 * Transforms a function definition into a {@link BLangFunction}. Definitions with a
 * relative resource path are built through the resource-specific factory; metadata
 * supplies annotations and markdown documentation.
 */
@Override
public BLangNode transform(FunctionDefinitionNode funcDefNode) {
    boolean isResourceFunction = !funcDefNode.relativeResourcePath().isEmpty();
    BLangFunction function = isResourceFunction
            ? createResourceFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                    funcDefNode.relativeResourcePath(), funcDefNode.functionSignature(),
                    funcDefNode.functionBody())
            : createFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                    funcDefNode.functionSignature(), funcDefNode.functionBody());
    function.annAttachments = applyAll(getAnnotations(funcDefNode.metadata()));
    function.pos = getPositionWithoutMetadata(funcDefNode);
    function.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(funcDefNode.metadata()));
    return function;
}
/**
 * Creates a non-resource {@link BLangFunction} and populates it from the given name,
 * qualifiers, signature and body.
 */
private BLangFunction createFunctionNode(IdentifierToken funcName, NodeList<Token> qualifierList,
                                         FunctionSignatureNode functionSignature, FunctionBodyNode functionBody) {
    BLangFunction function = (BLangFunction) TreeBuilder.createFunctionNode();
    populateFunctionNode(createIdentifier(getPosition(funcName), funcName), qualifierList,
            functionSignature, functionBody, function);
    return function;
}
/**
 * Populates name, qualifiers, signature and body on the given function node. A null
 * body marks the function as an interface (body-less) declaration; an external body
 * additionally sets the NATIVE flag.
 */
private void populateFunctionNode(BLangIdentifier name, NodeList<Token> qualifierList,
                                  FunctionSignatureNode functionSignature, FunctionBodyNode functionBody,
                                  BLangFunction bLFunction) {
    bLFunction.name = name;
    setFunctionQualifiers(bLFunction, qualifierList);
    populateFuncSignature(bLFunction, functionSignature);

    if (functionBody != null) {
        bLFunction.body = (BLangFunctionBody) functionBody.apply(this);
        if (bLFunction.body.getKind() == NodeKind.EXTERN_FUNCTION_BODY) {
            bLFunction.flagSet.add(Flag.NATIVE);
        }
        return;
    }
    // No body: this is an interface function declaration.
    bLFunction.body = null;
    bLFunction.flagSet.add(Flag.INTERFACE);
    bLFunction.interfaceFunction = true;
}
/**
 * Maps function qualifier tokens (public/private/remote/transactional/resource/isolated)
 * onto the function's flag set; unknown qualifiers are ignored.
 */
private void setFunctionQualifiers(BLangFunction bLFunction, NodeList<Token> qualifierList) {
    for (Token qualifier : qualifierList) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.PUBLIC_KEYWORD) {
            bLFunction.flagSet.add(Flag.PUBLIC);
        } else if (kind == SyntaxKind.PRIVATE_KEYWORD) {
            bLFunction.flagSet.add(Flag.PRIVATE);
        } else if (kind == SyntaxKind.REMOTE_KEYWORD) {
            bLFunction.flagSet.add(Flag.REMOTE);
        } else if (kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            bLFunction.flagSet.add(Flag.TRANSACTIONAL);
        } else if (kind == SyntaxKind.RESOURCE_KEYWORD) {
            bLFunction.flagSet.add(Flag.RESOURCE);
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            bLFunction.flagSet.add(Flag.ISOLATED);
        }
        // Other qualifiers carry no flag here.
    }
}
/** Transforms an {@code external} function body, keeping its annotations. */
@Override
public BLangNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) {
    BLangExternalFunctionBody externBody =
            (BLangExternalFunctionBody) TreeBuilder.createExternFunctionBodyNode();
    externBody.pos = getPosition(externalFunctionBodyNode);
    externBody.annAttachments = applyAll(externalFunctionBodyNode.annotations());
    return externBody;
}
/**
 * Transforms an explicit anonymous function expression into a top-level lambda
 * function with a generated name, returning the wrapping {@link BLangLambdaFunction}.
 */
@Override
public BLangNode transform(ExplicitAnonymousFunctionExpressionNode anonFuncExprNode) {
    Location pos = getPosition(anonFuncExprNode);

    BLangFunction function = (BLangFunction) TreeBuilder.createFunctionNode();
    function.pos = pos;
    function.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    setFunctionQualifiers(function, anonFuncExprNode.qualifierList());
    populateFuncSignature(function, anonFuncExprNode.functionSignature());
    function.body = (BLangFunctionBody) anonFuncExprNode.functionBody().apply(this);
    function.addFlag(Flag.LAMBDA);
    function.addFlag(Flag.ANONYMOUS);
    addToTop(function);

    BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambda.function = function;
    lambda.pos = pos;
    return lambda;
}
/**
 * Transforms a block function body. Named workers (and their initialization
 * statements) are materialized before the regular statements of the body.
 */
@Override
public BLangNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
    BLangBlockFunctionBody bLFuncBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    // While inside a function body, anonymous record types are flagged as local.
    this.isInLocalContext = true;
    List<BLangStatement> statements = new ArrayList<>();
    if (functionBodyBlockNode.namedWorkerDeclarator().isPresent()) {
        NamedWorkerDeclarator namedWorkerDeclarator = functionBodyBlockNode.namedWorkerDeclarator().get();
        generateAndAddBLangStatements(namedWorkerDeclarator.workerInitStatements(), statements);
        for (NamedWorkerDeclarationNode workerDeclarationNode : namedWorkerDeclarator.namedWorkerDeclarations()) {
            statements.add((BLangStatement) workerDeclarationNode.apply(this));
            // Worker transformation pushes follow-up statements (the worker invocation
            // variable) onto additionalStatements; drain them right after the declaration.
            while (!this.additionalStatements.empty()) {
                statements.add(additionalStatements.pop());
            }
        }
    }
    generateAndAddBLangStatements(functionBodyBlockNode.statements(), statements);
    bLFuncBody.stmts = statements;
    bLFuncBody.pos = getPosition(functionBodyBlockNode);
    this.isInLocalContext = false;
    return bLFuncBody;
}
/**
 * Transforms a {@code foreach} statement: builds the iteration variable definition
 * from the typed binding pattern, the loop body, the collection expression, and an
 * optional on-fail clause.
 */
@Override
public BLangNode transform(ForEachStatementNode forEachStatementNode) {
    BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
    foreach.pos = getPosition(forEachStatementNode);

    TypedBindingPatternNode bindingPattern = forEachStatementNode.typedBindingPattern();
    foreach.setVariableDefinitionNode(createBLangVarDef(getPosition(bindingPattern),
            bindingPattern, Optional.empty(), Optional.empty()));
    foreach.isDeclaredWithVar = bindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;

    BLangBlockStmt body = (BLangBlockStmt) forEachStatementNode.blockStatement().apply(this);
    body.pos = getPosition(forEachStatementNode.blockStatement());
    foreach.setBody(body);
    foreach.setCollection(createExpression(forEachStatementNode.actionOrExpressionNode()));

    forEachStatementNode.onFailClause().ifPresent(onFailClauseNode -> foreach.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClauseNode.apply(this)));
    return foreach;
}
/** Transforms a fork statement into a positioned {@link BLangForkJoin} node. */
@Override
public BLangNode transform(ForkStatementNode forkStatementNode) {
    BLangForkJoin forkJoin = (BLangForkJoin) TreeBuilder.createForkJoinNode();
    forkJoin.pos = getPosition(forkStatementNode);
    return forkJoin;
}
/**
 * Transforms a named worker declaration into a lambda-function variable definition.
 * The worker body becomes an anonymous top-level function; a variable definition for
 * the lambda (prefixed with {@code WORKER_LAMBDA_VAR_PREFIX}) is returned, and a
 * second variable definition holding the async invocation of that lambda is pushed
 * onto {@code additionalStatements} for the caller to drain.
 */
@Override
public BLangNode transform(NamedWorkerDeclarationNode namedWorkerDeclNode) {
    // Build the anonymous function that carries the worker body.
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location workerBodyPos = getPosition(namedWorkerDeclNode.workerBody());
    bLFunction.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    BLangBlockStmt blockStmt = (BLangBlockStmt) namedWorkerDeclNode.workerBody().apply(this);
    BLangBlockFunctionBody bodyNode = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    bodyNode.stmts = blockStmt.stmts;
    bodyNode.pos = workerBodyPos;
    bLFunction.body = bodyNode;
    bLFunction.internal = true;

    bLFunction.pos = workerBodyPos;

    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);
    bLFunction.addFlag(Flag.WORKER);

    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        bLFunction.addFlag(Flag.TRANSACTIONAL);
    }

    // Resolve the worker name; a quoted identifier ('foo) is unescaped and its
    // original spelling preserved.
    String workerName;
    if (namedWorkerDeclNode.workerName().isMissing()) {
        workerName = missingNodesHelper.getNextMissingNodeName(packageID);
    } else {
        workerName = namedWorkerDeclNode.workerName().text();
    }

    if (workerName.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
        bLFunction.defaultWorkerName.originalValue = workerName;
        workerName = IdentifierUtils.unescapeUnicodeCodepoints(workerName.substring(1));
    }

    bLFunction.defaultWorkerName.value = workerName;
    bLFunction.defaultWorkerName.pos = getPosition(namedWorkerDeclNode.workerName());

    NodeList<AnnotationNode> annotations = namedWorkerDeclNode.annotations();
    bLFunction.annAttachments = applyAll(annotations);

    // Worker return type; defaults to nil when none is declared.
    Optional<Node> retNode = namedWorkerDeclNode.returnTypeDesc();
    if (retNode.isPresent()) {
        ReturnTypeDescriptorNode returnType = (ReturnTypeDescriptorNode) retNode.get();
        bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
    } else {
        BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
        bLValueType.pos = getPosition(namedWorkerDeclNode);
        bLValueType.typeKind = TypeKind.NIL;
        bLFunction.setReturnTypeNode(bLValueType);
    }

    addToTop(bLFunction);

    // Wrap the function in a lambda expression.
    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = workerBodyPos;
    lambdaExpr.internal = true;

    // Define the `<prefix><workerName>` variable that holds the lambda.
    String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;

    Location workerNamePos = getPosition(namedWorkerDeclNode.workerName());
    // Check if the worker is in a fork. If so add the lambda function to the worker list.
    BLangSimpleVariable var = new SimpleVarBuilder()
            .with(workerLambdaName, workerNamePos)
            .setExpression(lambdaExpr)
            .isDeclaredWithVar()
            .isFinal()
            .build();

    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        var.addFlag(Flag.TRANSACTIONAL);
    }

    BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    lamdaWrkr.pos = workerBodyPos;
    var.pos = workerBodyPos;
    lamdaWrkr.setVariable(var);
    lamdaWrkr.isWorker = true;
    lamdaWrkr.internal = var.internal = true;
    if (namedWorkerDeclNode.parent().kind() == SyntaxKind.FORK_STATEMENT) {
        lamdaWrkr.isInFork = true;
        lamdaWrkr.var.flagSet.add(Flag.FORKED);
    }

    // Build the async invocation of the lambda variable (`start <lambda-var>()`).
    BLangInvocation bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
    BLangIdentifier nameInd = this.createIdentifier(workerNamePos, workerLambdaName);
    BLangNameReference reference = new BLangNameReference(workerNamePos, null, TreeBuilder.createIdentifierNode(),
            nameInd);
    bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
    bLInvocation.name = (BLangIdentifier) reference.name;
    bLInvocation.pos = workerNamePos;
    bLInvocation.flagSet = new HashSet<>();
    bLInvocation.annAttachments = bLFunction.annAttachments;

    if (bLInvocation.getKind() == NodeKind.INVOCATION) {
        bLInvocation.async = true;
    } else {
        dlog.error(workerBodyPos, DiagnosticErrorCode.START_REQUIRE_INVOCATION);
    }

    // Define the worker-named variable holding the invocation result and queue it
    // as an additional statement right after this one.
    BLangSimpleVariable invoc = new SimpleVarBuilder()
            .with(workerName, workerNamePos)
            .isDeclaredWithVar()
            .isWorkerVar()
            .setExpression(bLInvocation)
            .isFinal()
            .setPos(workerNamePos)
            .build();

    BLangSimpleVariableDef workerInvoc = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    workerInvoc.pos = workerNamePos;
    workerInvoc.setVariable(invoc);
    workerInvoc.isWorker = true;
    invoc.flagSet.add(Flag.WORKER);
    this.additionalStatements.push(workerInvoc);

    return lamdaWrkr;
}
/**
 * Applies this visitor to each node of {@code annotations} and collects the results.
 * A null node list yields an empty list.
 */
private <A extends BLangNode, B extends Node> List<A> applyAll(NodeList<B> annotations) {
    List<A> results = new ArrayList<>();
    if (annotations != null) {
        for (B annotation : annotations) {
            // The cast mirrors the caller's expectation of the produced node kind.
            @SuppressWarnings("unchecked")
            A transformed = (A) annotation.apply(this);
            results.add(transformed);
        }
    }
    return results;
}
/**
 * Transforms an annotation attachment, resolving its (possibly qualified) name and
 * its optional mapping-constructor value.
 */
@Override
public BLangNode transform(AnnotationNode annotation) {
    BLangAnnotationAttachment attachment =
            (BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();
    annotation.annotValue().ifPresent(mapValue ->
            attachment.setExpression((BLangExpression) mapValue.apply(this)));

    BLangNameReference nameReference = createBLangNameReference(annotation.annotReference());
    attachment.setAnnotationName(nameReference.name);
    attachment.setPackageAlias(nameReference.pkgAlias);
    attachment.pos = getPosition(annotation);
    return attachment;
}
/**
 * Transforms a query action (`from ... do { ... }`): the do-clause body is expanded
 * to cover the `do` keyword, and the clause list is assembled as from-clause,
 * intermediate clauses, then the do-clause.
 */
@Override
public BLangNode transform(QueryActionNode queryActionNode) {
    BLangDoClause doClause = (BLangDoClause) TreeBuilder.createDoClauseNode();
    doClause.body = (BLangBlockStmt) queryActionNode.blockStatement().apply(this);
    // Include the `do` keyword in the body's position.
    doClause.body.pos = expandLeft(doClause.body.pos, getPosition(queryActionNode.doKeyword()));
    doClause.pos = doClause.body.pos;

    BLangQueryAction queryAction = (BLangQueryAction) TreeBuilder.createQueryActionNode();
    queryAction.queryClauseList.add(queryActionNode.queryPipeline().fromClause().apply(this));
    queryAction.queryClauseList.addAll(applyAll(queryActionNode.queryPipeline().intermediateClauses()));
    queryAction.queryClauseList.add(doClause);
    queryAction.doClause = doClause;
    queryAction.pos = getPosition(queryActionNode);
    return queryAction;
}
/**
 * Transforms an annotation declaration: flags, documentation, optional type
 * descriptor, and the list of attach points. Composite attach points
 * (object method/field, service remote, record field) are resolved from the
 * identifier tokens of each attach-point node.
 */
@Override
public BLangNode transform(AnnotationDeclarationNode annotationDeclarationNode) {
    BLangAnnotation annotationDecl = (BLangAnnotation) TreeBuilder.createAnnotationNode();
    Location pos = getPositionWithoutMetadata(annotationDeclarationNode);
    annotationDecl.pos = pos;
    annotationDecl.name = createIdentifier(annotationDeclarationNode.annotationTag());

    if (annotationDeclarationNode.visibilityQualifier().isPresent()) {
        annotationDecl.addFlag(Flag.PUBLIC);
    }

    if (annotationDeclarationNode.constKeyword().isPresent()) {
        annotationDecl.addFlag(Flag.CONSTANT);
    }

    annotationDecl.annAttachments = applyAll(getAnnotations(annotationDeclarationNode.metadata()));

    annotationDecl.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(annotationDeclarationNode.metadata()));

    Optional<Node> typedesc = annotationDeclarationNode.typeDescriptor();
    if (typedesc.isPresent()) {
        annotationDecl.typeNode = createTypeNode(typedesc.get());
    }

    SeparatedNodeList<Node> paramList = annotationDeclarationNode.attachPoints();

    for (Node child : paramList) {
        AnnotationAttachPointNode attachPoint = (AnnotationAttachPointNode) child;
        boolean source = attachPoint.sourceKeyword().isPresent();
        AttachPoint bLAttachPoint;
        NodeList<Token> idents = attachPoint.identifiers();

        // The first identifier decides the attach-point family.
        switch (idents.get(0).kind()) {
            case OBJECT_KEYWORD:
                // `object function` or `object field`.
                Token secondIndent = idents.get(1);
                switch (secondIndent.kind()) {
                    case FUNCTION_KEYWORD:
                        bLAttachPoint =
                                AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_METHOD.getValue(), source);
                        break;
                    case FIELD_KEYWORD:
                        bLAttachPoint =
                                AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_FIELD.getValue(), source);
                        break;
                    default:
                        throw new RuntimeException("Syntax kind is not supported: " + secondIndent.kind());
                }
                break;
            case SERVICE_KEYWORD:
                // `service` alone, or the three-token `service remote function` form.
                String value;
                if (idents.size() == 1) {
                    value = AttachPoint.Point.SERVICE.getValue();
                } else if (idents.size() == 3) {
                    value = AttachPoint.Point.SERVICE_REMOTE.getValue();
                } else {
                    throw new RuntimeException("Invalid annotation attach point");
                }
                bLAttachPoint = AttachPoint.getAttachmentPoint(value, source);
                break;
            case RECORD_KEYWORD:
                bLAttachPoint = AttachPoint.getAttachmentPoint(AttachPoint.Point.RECORD_FIELD.getValue(), source);
                break;
            default:
                // Single-token attach points map directly by their text.
                bLAttachPoint = AttachPoint.getAttachmentPoint(idents.get(0).text(), source);
        }
        annotationDecl.addAttachPoint(bLAttachPoint);
    }

    return annotationDecl;
}
/**
 * Transforms an annotation access expression (`expr.@annot`), splitting the annotation
 * tag reference into package alias and annotation name.
 */
@Override
public BLangNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) {
    BLangAnnotAccessExpr annotAccessExpr = (BLangAnnotAccessExpr) TreeBuilder.createAnnotAccessExpressionNode();
    annotAccessExpr.pos = getPosition(annotAccessExpressionNode);
    annotAccessExpr.expr = createExpression(annotAccessExpressionNode.expression());

    Node annotTagReference = annotAccessExpressionNode.annotTagReference();
    if (annotTagReference.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // Unqualified annotation name: use an empty package alias.
        SimpleNameReferenceNode simpleName = (SimpleNameReferenceNode) annotTagReference;
        annotAccessExpr.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        annotAccessExpr.annotationName = createIdentifier(simpleName.name());
    } else {
        QualifiedNameReferenceNode qualifiedName = (QualifiedNameReferenceNode) annotTagReference;
        annotAccessExpr.pkgAlias = createIdentifier(qualifiedName.modulePrefix());
        annotAccessExpr.annotationName = createIdentifier(qualifiedName.identifier());
    }
    return annotAccessExpr;
}
/**
 * Transforms a conditional (ternary) expression. When the condition itself parsed as
 * a ternary expression, the tree is rotated so the conditional operator stays
 * right-associative.
 */
@Override
public BLangNode transform(ConditionalExpressionNode conditionalExpressionNode) {
    BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) TreeBuilder.createTernaryExpressionNode();
    ternaryExpr.pos = getPosition(conditionalExpressionNode);
    ternaryExpr.elseExpr = createExpression(conditionalExpressionNode.endExpression());
    ternaryExpr.thenExpr = createExpression(conditionalExpressionNode.middleExpression());
    ternaryExpr.expr = createExpression(conditionalExpressionNode.lhsExpression());
    if (ternaryExpr.expr.getKind() == NodeKind.TERNARY_EXPR) {
        // Re-associate: walk to the innermost else-branch of the condition's ternary
        // chain, make that branch this expression's condition, and splice this
        // expression in as the new innermost else-branch.
        BLangTernaryExpr root = (BLangTernaryExpr) ternaryExpr.expr;
        BLangTernaryExpr parent = root;
        while (parent.elseExpr.getKind() == NodeKind.TERNARY_EXPR) {
            parent = (BLangTernaryExpr) parent.elseExpr;
        }
        ternaryExpr.expr = parent.elseExpr;
        parent.elseExpr = ternaryExpr;
        ternaryExpr = root;
    }
    return ternaryExpr;
}
/**
 * Transforms a {@code check}/{@code checkpanic} expression, choosing the node type
 * by the leading keyword.
 */
@Override
public BLangNode transform(CheckExpressionNode checkExpressionNode) {
    Location pos = getPosition(checkExpressionNode);
    BLangExpression expr = createExpression(checkExpressionNode.expression());
    return checkExpressionNode.checkKeyword().kind() == SyntaxKind.CHECK_KEYWORD
            ? createCheckExpr(pos, expr)
            : createCheckPanickedExpr(pos, expr);
}
/** Transforms a type-test expression ({@code expr is T}) into a {@link BLangTypeTestExpr}. */
@Override
public BLangNode transform(TypeTestExpressionNode typeTestExpressionNode) {
    BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
    typeTestExpr.pos = getPosition(typeTestExpressionNode);
    typeTestExpr.typeNode = createTypeNode(typeTestExpressionNode.typeDescriptor());
    typeTestExpr.expr = createExpression(typeTestExpressionNode.expression());
    return typeTestExpr;
}
@Override
public BLangNode transform(MappingConstructorExpressionNode mapConstruct) {
    // Builds a mapping constructor `{...}`. Each member is one of:
    //   - a spread field `...expr`,
    //   - a computed-name field `[expr]: value`,
    //   - a specific field `name: value` or the shorthand `name` (var-name field).
    BLangRecordLiteral bLiteralNode = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (MappingFieldNode field : mapConstruct.fields()) {
        if (field.kind() == SyntaxKind.SPREAD_FIELD) {
            SpreadFieldNode spreadFieldNode = (SpreadFieldNode) field;
            BLangRecordSpreadOperatorField bLRecordSpreadOpField =
                    (BLangRecordSpreadOperatorField) TreeBuilder.createRecordSpreadOperatorField();
            bLRecordSpreadOpField.expr = createExpression(spreadFieldNode.valueExpr());
            bLRecordSpreadOpField.pos = getPosition(spreadFieldNode);
            bLiteralNode.fields.add(bLRecordSpreadOpField);
        } else if (field.kind() == SyntaxKind.COMPUTED_NAME_FIELD) {
            ComputedNameFieldNode computedNameField = (ComputedNameFieldNode) field;
            BLangRecordKeyValueField bLRecordKeyValueField =
                    (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
            bLRecordKeyValueField.valueExpr = createExpression(computedNameField.valueExpr());
            bLRecordKeyValueField.key =
                    new BLangRecordLiteral.BLangRecordKey(createExpression(computedNameField.fieldNameExpr()));
            bLRecordKeyValueField.key.computedKey = true;
            bLiteralNode.fields.add(bLRecordKeyValueField);
        } else {
            SpecificFieldNode specificField = (SpecificFieldNode) field;
            io.ballerina.compiler.syntax.tree.ExpressionNode valueExpr = specificField.valueExpr().orElse(null);
            if (valueExpr == null) {
                // Shorthand `{name}`: the field name doubles as a variable reference.
                BLangRecordLiteral.BLangRecordVarNameField fieldVar =
                        (BLangRecordLiteral.BLangRecordVarNameField) TreeBuilder.createRecordVarRefNameFieldNode();
                // Use the already-cast `specificField` instead of re-casting `field`.
                fieldVar.variableName = createIdentifier((Token) specificField.fieldName());
                fieldVar.pkgAlias = createIdentifier(null, "");
                fieldVar.pos = fieldVar.variableName.pos;
                fieldVar.readonly = specificField.readonlyKeyword().isPresent();
                bLiteralNode.fields.add(fieldVar);
            } else {
                BLangRecordKeyValueField bLRecordKeyValueField =
                        (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
                bLRecordKeyValueField.pos = getPosition(specificField);
                bLRecordKeyValueField.readonly = specificField.readonlyKeyword().isPresent();
                bLRecordKeyValueField.valueExpr = createExpression(valueExpr);
                bLRecordKeyValueField.key =
                        new BLangRecordLiteral.BLangRecordKey(createExpression(specificField.fieldName()));
                bLRecordKeyValueField.key.computedKey = false;
                bLRecordKeyValueField.key.pos = getPosition(specificField.fieldName());
                bLiteralNode.fields.add(bLRecordKeyValueField);
            }
        }
    }
    bLiteralNode.pos = getPosition(mapConstruct);
    return bLiteralNode;
}
@Override
public BLangNode transform(ListConstructorExpressionNode listConstructorExprNode) {
    // Builds a list constructor `[e1, e2, ...]` by transforming each member expression.
    BLangListConstructorExpr listConstructor =
            (BLangListConstructorExpr) TreeBuilder.createListConstructorExpressionNode();
    List<BLangExpression> members = new ArrayList<>();
    for (Node memberExpr : listConstructorExprNode.expressions()) {
        members.add(createExpression(memberExpr));
    }
    listConstructor.exprs = members;
    listConstructor.pos = getPosition(listConstructorExprNode);
    return listConstructor;
}
@Override
public BLangNode transform(UnaryExpressionNode unaryExprNode) {
    // `+lit` / `-lit` applied to a numeric literal folds into a single signed literal;
    // every other unary expression becomes a BLangUnaryExpr.
    Location pos = getPosition(unaryExprNode);
    SyntaxKind operatorKind = unaryExprNode.unaryOperator().kind();
    boolean isSign = operatorKind == SyntaxKind.MINUS_TOKEN || operatorKind == SyntaxKind.PLUS_TOKEN;
    if (isSign && unaryExprNode.expression().kind() == SyntaxKind.NUMERIC_LITERAL) {
        return createSimpleLiteral(unaryExprNode);
    }
    return createBLangUnaryExpr(pos,
            OperatorKind.valueFrom(unaryExprNode.unaryOperator().text()),
            createExpression(unaryExprNode.expression()));
}
@Override
public BLangNode transform(TypeofExpressionNode typeofExpressionNode) {
    // `typeof expr` is modeled as a unary expression with the `typeof` operator.
    Location typeofPos = getPosition(typeofExpressionNode);
    OperatorKind typeofOp = OperatorKind.valueFrom(typeofExpressionNode.typeofKeyword().text());
    return createBLangUnaryExpr(typeofPos, typeofOp, createExpression(typeofExpressionNode.expression()));
}
@Override
public BLangNode transform(BinaryExpressionNode binaryExprNode) {
    // The elvis operator `?:` has a dedicated node kind; all other operators map onto a
    // plain binary expression whose operator is resolved from its token text.
    if (binaryExprNode.operator().kind() == SyntaxKind.ELVIS_TOKEN) {
        BLangElvisExpr elvis = (BLangElvisExpr) TreeBuilder.createElvisExpressionNode();
        elvis.pos = getPosition(binaryExprNode);
        elvis.lhsExpr = createExpression(binaryExprNode.lhsExpr());
        elvis.rhsExpr = createExpression(binaryExprNode.rhsExpr());
        return elvis;
    }
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = getPosition(binaryExprNode);
    binaryExpr.lhsExpr = createExpression(binaryExprNode.lhsExpr());
    binaryExpr.rhsExpr = createExpression(binaryExprNode.rhsExpr());
    binaryExpr.opKind = OperatorKind.valueFrom(binaryExprNode.operator().text());
    return binaryExpr;
}
@Override
public BLangNode transform(FieldAccessExpressionNode fieldAccessExprNode) {
    // Transforms a non-optional field access `expr.field`. A qualified field name
    // (`prefix:field`) becomes an NS-prefixed access node; a simple name becomes a
    // plain field-based access.
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = fieldAccessExprNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess)
                        TreeBuilder.createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field =
                createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    io.ballerina.compiler.syntax.tree.ExpressionNode containerExpr = fieldAccessExprNode.expression();
    if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
        // A braced container `(expr).field` is unwrapped so the access hangs off the
        // inner expression directly.
        bLFieldBasedAccess.expr = createExpression(((BracedExpressionNode) containerExpr).expression());
    } else {
        bLFieldBasedAccess.expr = createExpression(containerExpr);
    }
    bLFieldBasedAccess.pos = getPosition(fieldAccessExprNode);
    bLFieldBasedAccess.field.pos = getPosition(fieldAccessExprNode.fieldName());
    bLFieldBasedAccess.optionalFieldAccess = false;
    return bLFieldBasedAccess;
}
@Override
public BLangNode transform(OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode) {
    // Transforms an optional field access `expr?.field`. Mirrors the non-optional case
    // (qualified name -> NS-prefixed access, simple name -> plain access) but marks the
    // resulting node as an optional access.
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = optionalFieldAccessExpressionNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) TreeBuilder
                        .createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field = createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    bLFieldBasedAccess.pos = getPosition(optionalFieldAccessExpressionNode);
    bLFieldBasedAccess.field.pos = getPosition(optionalFieldAccessExpressionNode.fieldName());
    bLFieldBasedAccess.expr = createExpression(optionalFieldAccessExpressionNode.expression());
    bLFieldBasedAccess.optionalFieldAccess = true;
    return bLFieldBasedAccess;
}
@Override
public BLangNode transform(BracedExpressionNode brcExprOut) {
    // A braced expression `(expr)` carries no semantics of its own here; delegate to
    // the inner expression.
    return createExpression(brcExprOut.expression());
}
@Override
public BLangNode transform(FunctionCallExpressionNode functionCallNode) {
    // Builds an invocation from the call's name and arguments; the call is treated as
    // asynchronous exactly when its parent is a `start` action (see isFunctionCallAsync).
    return createBLangInvocation(functionCallNode.functionName(), functionCallNode.arguments(),
            getPosition(functionCallNode), isFunctionCallAsync(functionCallNode));
}
@Override
public BLangNode transform(ErrorConstructorExpressionNode errorConstructorExprNode) {
    // Builds `error(...)` / `error TypeRef(...)`, splitting the argument list into
    // positional and named arguments. Other argument kinds are ignored here.
    BLangErrorConstructorExpr errorCtor =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    errorCtor.pos = getPosition(errorConstructorExprNode);
    if (errorConstructorExprNode.typeReference().isPresent()) {
        errorCtor.errorTypeRef =
                (BLangUserDefinedType) createTypeNode(errorConstructorExprNode.typeReference().get());
    }
    List<BLangExpression> positional = new ArrayList<>();
    List<BLangNamedArgsExpression> named = new ArrayList<>();
    for (Node argNode : errorConstructorExprNode.arguments()) {
        SyntaxKind argKind = argNode.kind();
        if (argKind == SyntaxKind.POSITIONAL_ARG) {
            positional.add((BLangExpression) transform((PositionalArgumentNode) argNode));
        } else if (argKind == SyntaxKind.NAMED_ARG) {
            named.add((BLangNamedArgsExpression) transform((NamedArgumentNode) argNode));
        }
    }
    errorCtor.positionalArgs = positional;
    errorCtor.namedArgs = named;
    return errorCtor;
}
@Override
public BLangNode transform(MethodCallExpressionNode methodCallExprNode) {
    // Builds `expr.method(args)` as an invocation whose receiver is the transformed
    // container expression. `@Override` was missing here although this overrides the
    // visitor method like every sibling transform.
    BLangInvocation bLInvocation = createBLangInvocation(methodCallExprNode.methodName(),
                                                         methodCallExprNode.arguments(),
                                                         getPosition(methodCallExprNode), false);
    bLInvocation.expr = createExpression(methodCallExprNode.expression());
    return bLInvocation;
}
@Override
public BLangNode transform(ImplicitNewExpressionNode implicitNewExprNode) {
    // `new (...)` without an explicit type: build the init invocation and mirror its
    // arguments onto the type-init node.
    BLangTypeInit typeInit = createTypeInit(implicitNewExprNode);
    BLangInvocation initInvocation = createInvocation(implicitNewExprNode, implicitNewExprNode.newKeyword());
    typeInit.argsExpr.addAll(initInvocation.argExprs);
    typeInit.initInvocation = initInvocation;
    return typeInit;
}
@Override
public BLangNode transform(ExplicitNewExpressionNode explicitNewExprNode) {
    // `new T(...)` with an explicit type: same wiring as the implicit form, but
    // createTypeInit also records the user-defined type.
    BLangTypeInit typeInit = createTypeInit(explicitNewExprNode);
    BLangInvocation initInvocation = createInvocation(explicitNewExprNode, explicitNewExprNode.newKeyword());
    typeInit.argsExpr.addAll(initInvocation.argExprs);
    typeInit.initInvocation = initInvocation;
    return typeInit;
}
private boolean isFunctionCallAsync(FunctionCallExpressionNode functionCallExpressionNode) {
    // A call is asynchronous exactly when it is the target of a `start` action.
    SyntaxKind parentKind = functionCallExpressionNode.parent().kind();
    return parentKind == SyntaxKind.START_ACTION;
}
private BLangTypeInit createTypeInit(NewExpressionNode expression) {
    // Creates the type-init node; only an explicit `new T(...)` carries a user-defined type.
    BLangTypeInit typeInit = (BLangTypeInit) TreeBuilder.createInitNode();
    typeInit.pos = getPosition(expression);
    if (expression.kind() == SyntaxKind.EXPLICIT_NEW_EXPRESSION) {
        typeInit.userDefinedType = createTypeNode(((ExplicitNewExpressionNode) expression).typeDescriptor());
    }
    return typeInit;
}
private BLangInvocation createInvocation(NewExpressionNode expression, Token newKeyword) {
    // Models the `new(...)` call itself as an invocation named after the `new` keyword.
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pos = getPosition(expression);
    populateArgsInvocation(expression, invocation);
    BLangNameReference newRef = createBLangNameReference(newKeyword);
    invocation.name = (BLangIdentifier) newRef.name;
    invocation.pkgAlias = (BLangIdentifier) newRef.pkgAlias;
    return invocation;
}
private void populateArgsInvocation(NewExpressionNode expression, BLangInvocation invocationNode) {
    // Copies each constructor argument into the invocation; an implicit `new` may have
    // no argument list at all (iterator is null in that case).
    Iterator<FunctionArgumentNode> args = getArgumentNodesIterator(expression);
    if (args == null) {
        return;
    }
    while (args.hasNext()) {
        invocationNode.argExprs.add(createExpression(args.next()));
    }
}
private Iterator<FunctionArgumentNode> getArgumentNodesIterator(NewExpressionNode expression) {
    // Explicit `new T(...)` always has a parenthesized argument list. Implicit `new`
    // may omit it, in which case null is returned and the caller adds no arguments.
    if (expression.kind() != SyntaxKind.IMPLICIT_NEW_EXPRESSION) {
        ParenthesizedArgList argList =
                (ParenthesizedArgList) ((ExplicitNewExpressionNode) expression).parenthesizedArgList();
        return argList.arguments().iterator();
    }
    Optional<ParenthesizedArgList> argsList = ((ImplicitNewExpressionNode) expression).parenthesizedArgList();
    if (!argsList.isPresent()) {
        return null;
    }
    return argsList.get().arguments().iterator();
}
@Override
public BLangNode transform(IndexedExpressionNode indexedExpressionNode) {
    // Transforms `container[k1, k2, ...]`. A single key becomes the index expression
    // directly; multiple keys become a table multi-key expression.
    BLangIndexBasedAccess indexBasedAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    indexBasedAccess.pos = getPosition(indexedExpressionNode);
    SeparatedNodeList<io.ballerina.compiler.syntax.tree.ExpressionNode> keys =
            indexedExpressionNode.keyExpression();
    if (keys.size() == 1) {
        indexBasedAccess.indexExpr = createExpression(indexedExpressionNode.keyExpression().get(0));
    } else {
        BLangTableMultiKeyExpr multiKeyExpr =
                (BLangTableMultiKeyExpr) TreeBuilder.createTableMultiKeyExpressionNode();
        multiKeyExpr.pos = getPosition(keys.get(0), keys.get(keys.size() - 1));
        List<BLangExpression> multiKeyIndexExprs = new ArrayList<>();
        for (io.ballerina.compiler.syntax.tree.ExpressionNode keyExpr : keys) {
            multiKeyIndexExprs.add(createExpression(keyExpr));
        }
        multiKeyExpr.multiKeyIndexExprs = multiKeyIndexExprs;
        indexBasedAccess.indexExpr = multiKeyExpr;
    }
    Node containerExpr = indexedExpressionNode.containerExpression();
    BLangExpression expression = createExpression(containerExpr);
    if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
        // `(expr)[i]`: access the inner expression, then re-wrap the whole access in a
        // group node so the braces stay visible in the tree.
        indexBasedAccess.expr = ((BLangGroupExpr) expression).expression;
        BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        group.expression = indexBasedAccess;
        group.pos = getPosition(indexedExpressionNode);
        return group;
    } else if (containerExpr.kind() == SyntaxKind.XML_STEP_EXPRESSION) {
        // For an XML step container, the index is folded into the navigation access as
        // its child index instead of producing an index-access node.
        ((BLangXMLNavigationAccess) expression).childIndex = indexBasedAccess.indexExpr;
        return expression;
    }
    indexBasedAccess.expr = expression;
    return indexBasedAccess;
}
@Override
public BLangTypeConversionExpr transform(TypeCastExpressionNode typeCastExpressionNode) {
    // Transforms `<@annot T> expr`. The cast param carries the optional target type and
    // any annotations attached to the cast.
    BLangTypeConversionExpr typeConversionNode = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    typeConversionNode.pos = getPosition(typeCastExpressionNode);
    TypeCastParamNode typeCastParamNode = typeCastExpressionNode.typeCastParam();
    typeConversionNode.expr = createExpression(typeCastExpressionNode.expression());
    // Previously the null check covered only the type lookup while `annotations()` was
    // dereferenced unconditionally, which would NPE on a null cast param. Guard both.
    if (typeCastParamNode != null) {
        if (typeCastParamNode.type().isPresent()) {
            typeConversionNode.typeNode = createTypeNode(typeCastParamNode.type().get());
        }
        typeConversionNode.annAttachments = applyAll(typeCastParamNode.annotations());
    }
    return typeConversionNode;
}
@Override
public BLangNode transform(Token token) {
    // Only literal-bearing tokens may reach this transform; anything else indicates a
    // transformer bug and fails fast.
    SyntaxKind kind = token.kind();
    if (kind == SyntaxKind.XML_TEXT_CONTENT
            || kind == SyntaxKind.TEMPLATE_STRING
            || kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return createSimpleLiteral(token);
    }
    throw new RuntimeException("Syntax kind is not supported: " + kind);
}
@Override
public BLangNode transform(InterpolationNode interpolationNode) {
    // `${expr}` inside a template: only the inner expression is transformed.
    return createExpression(interpolationNode.expression());
}
@Override
public BLangNode transform(TemplateExpressionNode expressionNode) {
    // Dispatches template literals (xml / string / raw) to their dedicated builders;
    // any other kind is unsupported here.
    SyntaxKind kind = expressionNode.kind();
    if (kind == SyntaxKind.XML_TEMPLATE_EXPRESSION) {
        return createXmlTemplateLiteral(expressionNode);
    }
    if (kind == SyntaxKind.STRING_TEMPLATE_EXPRESSION) {
        return createStringTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
    }
    if (kind == SyntaxKind.RAW_TEMPLATE_EXPRESSION) {
        return createRawTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
    }
    throw new RuntimeException("Syntax kind is not supported: " + kind);
}
@Override
public BLangNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {
    // Builds `table [...]`: each row is a record literal, and the optional `key(...)`
    // specifier becomes the table key specifier.
    BLangTableConstructorExpr tableConstructorExpr =
            (BLangTableConstructorExpr) TreeBuilder.createTableConstructorExpressionNode();
    tableConstructorExpr.pos = getPosition(tableConstructorExpressionNode);
    for (Node row : tableConstructorExpressionNode.rows()) {
        tableConstructorExpr.addRecordLiteral((BLangRecordLiteral) row.apply(this));
    }
    // ifPresent replaces the earlier isPresent() + orElse(null) combination, which
    // redundantly re-unwrapped the Optional it had already checked.
    tableConstructorExpressionNode.keySpecifier().ifPresent(keySpecifier ->
            tableConstructorExpr.tableKeySpecifier = (BLangTableKeySpecifier) keySpecifier.apply(this));
    return tableConstructorExpr;
}
@Override
public BLangNode transform(TrapExpressionNode trapExpressionNode) {
    // `trap expr` wraps the expression so panics surface as error values.
    BLangTrapExpr trap = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trap.pos = getPosition(trapExpressionNode);
    trap.expr = createExpression(trapExpressionNode.expression());
    return trap;
}
@Override
public BLangNode transform(ReceiveActionNode receiveActionNode) {
    // Transforms `<- worker`. Only a single-worker receive is supported: a multiple
    // receive (`<- {a, b}`) logs an error and a missing identifier token is synthesized
    // so transformation can continue.
    BLangWorkerReceive workerReceiveExpr = (BLangWorkerReceive) TreeBuilder.createWorkerReceiveNode();
    Node receiveWorkers = receiveActionNode.receiveWorkers();
    Token workerName;
    if (receiveWorkers.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        workerName = ((SimpleNameReferenceNode) receiveWorkers).name();
    } else {
        Location receiveFieldsPos = getPosition(receiveWorkers);
        dlog.error(receiveFieldsPos, DiagnosticErrorCode.MULTIPLE_RECEIVE_ACTION_NOT_YET_SUPPORTED);
        workerName = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
    }
    workerReceiveExpr.setWorkerName(createIdentifier(workerName));
    workerReceiveExpr.pos = getPosition(receiveActionNode);
    return workerReceiveExpr;
}
@Override
public BLangNode transform(SyncSendActionNode syncSendActionNode) {
    // `expr ->> worker`: synchronous send of an expression to a named peer worker.
    BLangWorkerSyncSendExpr syncSend = TreeBuilder.createWorkerSendSyncExprNode();
    syncSend.setWorkerName(createIdentifier(syncSendActionNode.peerWorker().name()));
    syncSend.expr = createExpression(syncSendActionNode.expression());
    syncSend.pos = getPosition(syncSendActionNode);
    return syncSend;
}
@Override
public BLangNode transform(ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) {
    // Transforms an arrow function `params => expr` into a BLangArrowFunction with a
    // synthesized anonymous name and an expression body.
    BLangArrowFunction arrowFunction = (BLangArrowFunction) TreeBuilder.createArrowFunctionNode();
    arrowFunction.pos = getPosition(implicitAnonymousFunctionExpressionNode);
    arrowFunction.functionName = createIdentifier(arrowFunction.pos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    Node param = implicitAnonymousFunctionExpressionNode.params();
    if (param.kind() == SyntaxKind.INFER_PARAM_LIST) {
        // `(a, b) => ...`: one parameter per name in the inferred parameter list.
        ImplicitAnonymousFunctionParameters paramsNode = (ImplicitAnonymousFunctionParameters) param;
        SeparatedNodeList<SimpleNameReferenceNode> paramList = paramsNode.parameters();
        for (SimpleNameReferenceNode child : paramList) {
            arrowFunction.params.add(createArrowFunctionParameter(child));
        }
    } else {
        // `a => ...`: single bare parameter.
        arrowFunction.params.add(createArrowFunctionParameter(param));
    }
    arrowFunction.body = new BLangExprFunctionBody();
    arrowFunction.body.expr = createExpression(implicitAnonymousFunctionExpressionNode.expression());
    arrowFunction.body.pos = arrowFunction.body.expr.pos;
    return arrowFunction;
}

// Builds one arrow-function parameter from a parameter name node (previously duplicated
// inline in both branches of the transform above).
private BLangSimpleVariable createArrowFunctionParameter(Node param) {
    BLangUserDefinedType userDefinedType = (BLangUserDefinedType) param.apply(this);
    BLangSimpleVariable parameter = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    parameter.name = userDefinedType.typeName;
    parameter.pos = getPosition(param);
    return parameter;
}
@Override
public BLangNode transform(CommitActionNode commitActionNode) {
    // `commit` carries no operands; only its source position is recorded.
    BLangCommitExpr commit = TreeBuilder.createCommitExpressionNode();
    commit.pos = getPosition(commitActionNode);
    return commit;
}
@Override
public BLangNode transform(FlushActionNode flushActionNode) {
    // `flush [worker]`: the peer worker is optional; when absent no worker identifier
    // is recorded on the flush expression.
    BLangWorkerFlushExpr flushExpr = TreeBuilder.createWorkerFlushExpressionNode();
    flushActionNode.peerWorker().ifPresent(peerWorker ->
            flushExpr.workerIdentifier = createIdentifier(((SimpleNameReferenceNode) peerWorker).name()));
    flushExpr.pos = getPosition(flushActionNode);
    return flushExpr;
}
@Override
public BLangNode transform(LetExpressionNode letExpressionNode) {
    // `let x = ..., y = ... in expr`: the body expression plus one let-variable per
    // declaration.
    BLangLetExpression letExpr = (BLangLetExpression) TreeBuilder.createLetExpressionNode();
    letExpr.pos = getPosition(letExpressionNode);
    letExpr.expr = createExpression(letExpressionNode.expression());
    List<BLangLetVariable> declarations = new ArrayList<>();
    for (LetVariableDeclarationNode letVarDecl : letExpressionNode.letVarDeclarations()) {
        declarations.add(createLetVariable(letVarDecl));
    }
    letExpr.letVarDeclarations = declarations;
    return letExpr;
}
public BLangLetVariable createLetVariable(LetVariableDeclarationNode letVarDecl) {
    // Each let declaration becomes a FINAL variable definition carrying its annotations.
    BLangLetVariable letVar = TreeBuilder.createLetVariableNode();
    VariableDefinitionNode varDefNode = createBLangVarDef(getPosition(letVarDecl),
            letVarDecl.typedBindingPattern(), Optional.of(letVarDecl.expression()), Optional.empty());
    varDefNode.getVariable().addFlag(Flag.FINAL);
    for (BLangNode annot : applyAll(letVarDecl.annotations())) {
        varDefNode.getVariable().addAnnotationAttachment((AnnotationAttachmentNode) annot);
    }
    letVar.definitionNode = varDefNode;
    return letVar;
}
@Override
public BLangNode transform(MappingBindingPatternNode mappingBindingPatternNode) {
    // Mapping binding pattern used as an assignment LHS: each field pattern becomes a
    // key/value ref; a rest pattern becomes the rest parameter.
    BLangRecordVarRef recordVarRef = (BLangRecordVarRef) TreeBuilder.createRecordVariableReferenceNode();
    recordVarRef.pos = getPosition(mappingBindingPatternNode);
    List<BLangRecordVarRefKeyValue> fieldRefs = new ArrayList<>();
    for (BindingPatternNode fieldPattern : mappingBindingPatternNode.fieldBindingPatterns()) {
        if (fieldPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            recordVarRef.restParam = createExpression(fieldPattern);
            continue;
        }
        fieldRefs.add(createRecordVarKeyValue(fieldPattern));
    }
    recordVarRef.recordRefFields = fieldRefs;
    return recordVarRef;
}
private BLangRecordVarRefKeyValue createRecordVarKeyValue(BindingPatternNode expr) {
    // A full pattern (`name: pattern`) carries an explicit value pattern; the var-name
    // shorthand (`{name}`) binds to a simple var ref with the same name.
    BLangRecordVarRefKeyValue keyValue = new BLangRecordVarRefKeyValue();
    if (expr instanceof FieldBindingPatternFullNode) {
        FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) expr;
        keyValue.variableName = createIdentifier(fullNode.variableName().name());
        keyValue.variableReference = createExpression(fullNode.bindingPattern());
        return keyValue;
    }
    FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) expr;
    keyValue.variableName = createIdentifier(varnameNode.variableName().name());
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(varnameNode.variableName());
    varRef.variableName = createIdentifier(varnameNode.variableName().name());
    varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    keyValue.variableReference = varRef;
    return keyValue;
}
@Override
public BLangNode transform(ListBindingPatternNode listBindingPatternNode) {
    // List binding pattern used as an assignment LHS: member patterns become member
    // refs; a rest pattern becomes the rest parameter.
    BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) TreeBuilder.createTupleVariableReferenceNode();
    List<BLangExpression> memberRefs = new ArrayList<>();
    for (BindingPatternNode memberPattern : listBindingPatternNode.bindingPatterns()) {
        if (memberPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            tupleVarRef.restParam = createExpression(memberPattern);
            continue;
        }
        memberRefs.add(createExpression(memberPattern));
    }
    tupleVarRef.expressions = memberRefs;
    tupleVarRef.pos = getPosition(listBindingPatternNode);
    return tupleVarRef;
}
@Override
public BLangNode transform(RestBindingPatternNode restBindingPatternNode) {
    // `...rest` in a binding pattern: reference the rest variable by name.
    return createExpression(restBindingPatternNode.variableName());
}
@Override
public BLangNode transform(CaptureBindingPatternNode captureBindingPatternNode) {
    // A capture pattern is just a variable name; reference it directly.
    return createExpression(captureBindingPatternNode.variableName());
}
@Override
public BLangNode transform(WildcardBindingPatternNode wildcardBindingPatternNode) {
    // `_` binds to the reserved ignore name so the matched value is discarded.
    // Note: only the identifier's position is set, matching existing behavior.
    BLangIdentifier ignoreName = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    ignoreName.value = Names.IGNORE.value;
    ignoreName.pos = getPosition(wildcardBindingPatternNode);
    BLangSimpleVarRef ignoreVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    ignoreVarRef.variableName = ignoreName;
    return ignoreVarRef;
}
@Override
public BLangNode transform(ErrorBindingPatternNode errorBindingPatternNode) {
    // Transforms `error [TypeRef] (msg, cause, name = ..., ...rest)` used as an
    // assignment LHS into an error var-ref.
    BLangErrorVarRef errorVarRef = (BLangErrorVarRef) TreeBuilder.createErrorVariableReferenceNode();
    errorVarRef.pos = getPosition(errorBindingPatternNode);
    Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
    if (errorTypeRef.isPresent()) {
        errorVarRef.typeNode = createTypeNode(errorTypeRef.get());
    }
    SeparatedNodeList<BindingPatternNode> argListBindingPatterns = errorBindingPatternNode.argListBindingPatterns();
    int numberOfArgs = argListBindingPatterns.size();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    for (int position = 0; position < numberOfArgs; position++) {
        BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
        switch (bindingPatternNode.kind()) {
            // A capture/wildcard at position 0 is the error message. At any other
            // position it deliberately FALLS THROUGH to the cause case below.
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                if (position == 0) {
                    errorVarRef.message = (BLangVariableReference) createExpression(bindingPatternNode);
                    break;
                }
            case ERROR_BINDING_PATTERN:
                errorVarRef.cause = (BLangVariableReference) createExpression(bindingPatternNode);
                break;
            case NAMED_ARG_BINDING_PATTERN:
                namedArgs.add((BLangNamedArgsExpression) bindingPatternNode.apply(this));
                break;
            default:
                // Anything else (e.g. a rest pattern) becomes the rest variable.
                errorVarRef.restVar = (BLangVariableReference) createExpression(bindingPatternNode);
        }
    }
    errorVarRef.detail = namedArgs;
    return errorVarRef;
}
@Override
public BLangNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) {
    // `name = pattern` inside an error binding pattern's argument list.
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.pos = getPosition(namedArgBindingPatternNode);
    namedArg.name = createIdentifier(namedArgBindingPatternNode.argName());
    namedArg.expr = createExpression(namedArgBindingPatternNode.bindingPattern());
    return namedArg;
}
@Override
public BLangNode transform(ReturnStatementNode returnStmtNode) {
    // A bare `return;` is normalized to `return ()` by synthesizing a nil literal so
    // downstream phases always see a return expression.
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    Location returnPos = getPosition(returnStmtNode);
    returnStmt.pos = returnPos;
    if (!returnStmtNode.expression().isPresent()) {
        BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        nilLiteral.pos = returnPos;
        nilLiteral.value = Names.NIL_VALUE;
        nilLiteral.type = symTable.nilType;
        returnStmt.expr = nilLiteral;
    } else {
        returnStmt.expr = createExpression(returnStmtNode.expression().get());
    }
    return returnStmt;
}
@Override
public BLangNode transform(PanicStatementNode panicStmtNode) {
    // `panic expr`.
    BLangPanic panicStmt = (BLangPanic) TreeBuilder.createPanicNode();
    panicStmt.pos = getPosition(panicStmtNode);
    panicStmt.expr = createExpression(panicStmtNode.expression());
    return panicStmt;
}
@Override
public BLangNode transform(ContinueStatementNode continueStmtNode) {
    // `continue` has no operands; only the position is recorded.
    BLangContinue continueStmt = (BLangContinue) TreeBuilder.createContinueNode();
    continueStmt.pos = getPosition(continueStmtNode);
    return continueStmt;
}
@Override
public BLangNode transform(ListenerDeclarationNode listenerDeclarationNode) {
    // A listener declaration is lowered to a (possibly public) listener variable with
    // its annotations attached. Local renamed from `var` to avoid shadowing the
    // contextual keyword.
    Token visibilityQualifier = listenerDeclarationNode.visibilityQualifier().orElse(null);
    BLangSimpleVariable listenerVar = new SimpleVarBuilder()
            .with(listenerDeclarationNode.variableName())
            .setTypeByNode(listenerDeclarationNode.typeDescriptor().orElse(null))
            .setExpressionByNode(listenerDeclarationNode.initializer())
            .setVisibility(visibilityQualifier)
            .isListenerVar()
            .build();
    listenerVar.pos = getPositionWithoutMetadata(listenerDeclarationNode);
    listenerVar.name.pos = getPosition(listenerDeclarationNode.variableName());
    listenerVar.annAttachments = applyAll(getAnnotations(listenerDeclarationNode.metadata()));
    return listenerVar;
}
@Override
public BLangNode transform(BreakStatementNode breakStmtNode) {
    // `break` has no operands; only the position is recorded.
    BLangBreak breakStmt = (BLangBreak) TreeBuilder.createBreakNode();
    breakStmt.pos = getPosition(breakStmtNode);
    return breakStmt;
}
@Override
public BLangNode transform(AssignmentStatementNode assignmentStmtNode) {
    // Binding-pattern LHSs become destructuring statements; anything else is a plain
    // assignment whose LHS must be a valid lvalue (invocations are rejected).
    switch (assignmentStmtNode.varRef().kind()) {
        case LIST_BINDING_PATTERN:
            return createTupleDestructureStatement(assignmentStmtNode);
        case MAPPING_BINDING_PATTERN:
            return createRecordDestructureStatement(assignmentStmtNode);
        case ERROR_BINDING_PATTERN:
            return createErrorDestructureStatement(assignmentStmtNode);
        default:
            break;
    }
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    BLangExpression lhsExpr = createExpression(assignmentStmtNode.varRef());
    validateLvexpr(lhsExpr, DiagnosticErrorCode.INVALID_INVOCATION_LVALUE_ASSIGNMENT);
    assignment.setExpression(createExpression(assignmentStmtNode.expression()));
    assignment.pos = getPosition(assignmentStmtNode);
    assignment.varRef = lhsExpr;
    return assignment;
}
public BLangNode createTupleDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    // `[a, b] = expr` — tuple destructuring assignment.
    BLangTupleDestructure destructure =
            (BLangTupleDestructure) TreeBuilder.createTupleDestructureStatementNode();
    destructure.varRef = (BLangTupleVarRef) createExpression(assignmentStmtNode.varRef());
    destructure.setExpression(createExpression(assignmentStmtNode.expression()));
    destructure.pos = getPosition(assignmentStmtNode);
    return destructure;
}
public BLangNode createRecordDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    // `{a, b} = expr` — record destructuring assignment.
    BLangRecordDestructure destructure =
            (BLangRecordDestructure) TreeBuilder.createRecordDestructureStatementNode();
    destructure.varRef = (BLangRecordVarRef) createExpression(assignmentStmtNode.varRef());
    destructure.setExpression(createExpression(assignmentStmtNode.expression()));
    destructure.pos = getPosition(assignmentStmtNode);
    return destructure;
}
public BLangNode createErrorDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    // `error(msg, ...) = expr` — error destructuring assignment.
    BLangErrorDestructure destructure =
            (BLangErrorDestructure) TreeBuilder.createErrorDestructureStatementNode();
    destructure.varRef = (BLangErrorVarRef) createExpression(assignmentStmtNode.varRef());
    destructure.setExpression(createExpression(assignmentStmtNode.expression()));
    destructure.pos = getPosition(assignmentStmtNode);
    return destructure;
}
@Override
public BLangNode transform(CompoundAssignmentStatementNode compoundAssignmentStmtNode) {
    // `lhs op= rhs`: the operator kind is resolved from the binary operator token text.
    BLangCompoundAssignment compoundAssignment =
            (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    compoundAssignment.setExpression(createExpression(compoundAssignmentStmtNode.rhsExpression()));
    compoundAssignment.setVariable(
            (VariableReferenceNode) createExpression(compoundAssignmentStmtNode.lhsExpression()));
    compoundAssignment.pos = getPosition(compoundAssignmentStmtNode);
    compoundAssignment.opKind = OperatorKind.valueFrom(compoundAssignmentStmtNode.binaryOperator().text());
    return compoundAssignment;
}
private void validateLvexpr(ExpressionNode lExprNode, DiagnosticCode errorCode) {
    // Invocations are not assignable; recurse through access chains so that
    // `foo().x = ...` and `foo()[i] = ...` are also rejected.
    NodeKind kind = lExprNode.getKind();
    if (kind == NodeKind.INVOCATION) {
        dlog.error(((BLangInvocation) lExprNode).pos, errorCode);
    }
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        validateLvexpr(((BLangAccessExpression) lExprNode).expr, errorCode);
    }
}
@Override
public BLangNode transform(DoStatementNode doStatementNode) {
    // `do { ... } [on fail ...]`.
    BLangDo doStmt = (BLangDo) TreeBuilder.createDoNode();
    doStmt.pos = getPosition(doStatementNode);
    BLangBlockStmt body = (BLangBlockStmt) doStatementNode.blockStatement().apply(this);
    body.pos = getPosition(doStatementNode.blockStatement());
    doStmt.setBody(body);
    doStatementNode.onFailClause().ifPresent(onFailClauseNode -> doStmt.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClauseNode.apply(this)));
    return doStmt;
}
@Override
public BLangNode transform(FailStatementNode failStatementNode) {
    // `fail expr`.
    BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
    failStmt.pos = getPosition(failStatementNode);
    failStmt.expr = createExpression(failStatementNode.expression());
    return failStmt;
}
@Override
public BLangNode transform(WhileStatementNode whileStmtNode) {
    // Maps a `while <cond> { ... } [on fail ...]` statement to a BLangWhile node.
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.setCondition(createExpression(whileStmtNode.condition()));
    whileNode.pos = getPosition(whileStmtNode);
    BLangBlockStmt body = (BLangBlockStmt) whileStmtNode.whileBody().apply(this);
    body.pos = getPosition(whileStmtNode.whileBody());
    whileNode.setBody(body);
    whileStmtNode.onFailClause().ifPresent(onFailClauseNode -> whileNode.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClauseNode.apply(this)));
    return whileNode;
}
@Override
public BLangNode transform(IfElseStatementNode ifElseStmtNode) {
    // Maps an if/else statement; the else body may itself be another if-else chain.
    BLangIf ifNode = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifNode.pos = getPosition(ifElseStmtNode);
    ifNode.setCondition(createExpression(ifElseStmtNode.condition()));
    ifNode.setBody((BLangBlockStmt) ifElseStmtNode.ifBody().apply(this));
    ifElseStmtNode.elseBody().ifPresent(elseBody -> ifNode.setElseStatement(
            (org.ballerinalang.model.tree.statements.StatementNode)
                    ((ElseBlockNode) elseBody).elseBody().apply(this)));
    return ifNode;
}
@Override
public BLangNode transform(BlockStatementNode blockStatement) {
    // Transforms a statement block. Statements are generated while the local-context
    // flag is raised, then the flag is cleared again.
    BLangBlockStmt blockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    this.isInLocalContext = true;
    blockStmt.stmts = generateBLangStatements(blockStatement.statements());
    this.isInLocalContext = false;
    blockStmt.pos = getPosition(blockStatement);
    // For if/else bodies, widen the block position leftwards to cover the parent construct.
    SyntaxKind parentKind = blockStatement.parent().kind();
    if (parentKind == SyntaxKind.IF_ELSE_STATEMENT || parentKind == SyntaxKind.ELSE_BLOCK) {
        blockStmt.pos = expandLeft(blockStmt.pos, getPosition(blockStatement.parent()));
    }
    return blockStmt;
}
@Override
public BLangNode transform(RollbackStatementNode rollbackStatementNode) {
    // Maps a `rollback [expr];` statement; the expression is optional.
    BLangRollback rollbackStmt = (BLangRollback) TreeBuilder.createRollbackNode();
    rollbackStmt.pos = getPosition(rollbackStatementNode);
    rollbackStatementNode.expression().ifPresent(expr -> rollbackStmt.expr = createExpression(expr));
    return rollbackStmt;
}
@Override
public BLangNode transform(LockStatementNode lockStatementNode) {
    // Maps a `lock { ... } [on fail ...]` statement to a BLangLock node.
    BLangLock lockNode = (BLangLock) TreeBuilder.createLockNode();
    lockNode.pos = getPosition(lockStatementNode);
    BLangBlockStmt body = (BLangBlockStmt) lockStatementNode.blockStatement().apply(this);
    body.pos = getPosition(lockStatementNode.blockStatement());
    lockNode.setBody(body);
    lockStatementNode.onFailClause().ifPresent(onFailClauseNode -> lockNode.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClauseNode.apply(this)));
    return lockNode;
}
@Override
public BLangNode transform(TypedescTypeDescriptorNode typedescTypeDescriptorNode) {
    // `typedesc` maps to a built-in ref type; `typedesc<T>` wraps it in a constrained type.
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TypeKind.TYPEDESC;
    refType.pos = getPosition(typedescTypeDescriptorNode);
    Optional<TypeParameterNode> typeParam = typedescTypeDescriptorNode.typedescTypeParamsNode();
    if (!typeParam.isPresent()) {
        return refType;
    }
    BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
    constrainedType.type = refType;
    constrainedType.constraint = createTypeNode(typeParam.get().typeNode());
    constrainedType.pos = refType.pos;
    return constrainedType;
}
@Override
public BLangNode transform(VariableDeclarationNode varDeclaration) {
    // Delegates to createBLangVarDef, which selects the correct variable-definition node
    // (simple/record/tuple/error) based on the binding pattern.
    return (BLangNode) createBLangVarDef(getPosition(varDeclaration), varDeclaration.typedBindingPattern(),
            varDeclaration.initializer(), varDeclaration.finalKeyword());
}
// Maps `xml` / `xml<T>` type descriptors.
// NOTE(review): unlike sibling transform methods this one carries no @Override — confirm
// it still overrides the base transformer's method.
public BLangNode transform(XmlTypeDescriptorNode xmlTypeDescriptorNode) {
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TypeKind.XML;
    refType.pos = getPosition(xmlTypeDescriptorNode);
    // `xml<T>`: wrap the built-in ref type in a constrained type carrying T.
    Optional<TypeParameterNode> node = xmlTypeDescriptorNode.xmlTypeParamsNode();
    if (node.isPresent()) {
        BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
        constrainedType.type = refType;
        constrainedType.constraint = createTypeNode(node.get().typeNode());
        constrainedType.pos = getPosition(xmlTypeDescriptorNode);
        return constrainedType;
    }
    return refType;
}
// Builds the appropriate variable-definition node for a typed binding pattern:
// simple (capture/wildcard), record (mapping), tuple (list) or error binding pattern.
private VariableDefinitionNode createBLangVarDef(Location location,
        TypedBindingPatternNode typedBindingPattern,
        Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
        Optional<Token> finalKeyword) {
    BindingPatternNode bindingPattern = typedBindingPattern.bindingPattern();
    BLangVariable variable = getBLangVariableNode(bindingPattern);
    List<Token> qualifiers = new ArrayList<>();
    if (finalKeyword.isPresent()) {
        qualifiers.add(finalKeyword.get());
    }
    NodeList<Token> qualifierList = NodeFactory.createNodeList(qualifiers);
    switch (bindingPattern.kind()) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // Simple variables are initialized inline here rather than via
            // initializeBLangVariable.
            BLangSimpleVariableDef bLVarDef =
                    (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
            bLVarDef.pos = variable.pos = location;
            BLangExpression expr = initializer.isPresent() ? createExpression(initializer.get()) : null;
            variable.setInitialExpression(expr);
            bLVarDef.setVariable(variable);
            if (finalKeyword.isPresent()) {
                variable.flagSet.add(Flag.FINAL);
            }
            TypeDescriptorNode typeDesc = typedBindingPattern.typeDescriptor();
            variable.isDeclaredWithVar = isDeclaredWithVar(typeDesc);
            if (!variable.isDeclaredWithVar) {
                variable.setTypeNode(createTypeNode(typeDesc));
            }
            return bLVarDef;
        case MAPPING_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createRecordVariableDef(variable);
        case LIST_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createTupleVariableDef(variable);
        case ERROR_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createErrorVariableDef(variable);
        default:
            throw new RuntimeException(
                    "Syntax kind is not a valid binding pattern " + typedBindingPattern.bindingPattern().kind());
    }
}
// Applies qualifier flags (final/configurable/isolated), resolves the declared type
// (or marks `var`), and attaches the initializer expression, if any.
private void initializeBLangVariable(BLangVariable var, TypeDescriptorNode type,
        Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
        NodeList<Token> qualifiers) {
    for (Token qualifier : qualifiers) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.FINAL_KEYWORD) {
            markVariableWithFlag(var, Flag.FINAL);
        } else if (kind == SyntaxKind.CONFIGURABLE_KEYWORD) {
            var.flagSet.add(Flag.CONFIGURABLE);
            var.flagSet.add(Flag.FINAL);
            // Guard the Optional before dereferencing: an absent initializer must not
            // throw NoSuchElementException here. `configurable x = ?;` is represented by
            // a REQUIRED_EXPRESSION initializer, which is a marker only — it must not be
            // attached as an actual initial expression.
            if (initializer.isPresent() && initializer.get().kind() == SyntaxKind.REQUIRED_EXPRESSION) {
                var.flagSet.add(Flag.REQUIRED);
                initializer = Optional.empty();
            }
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            var.flagSet.add(Flag.ISOLATED);
        }
    }
    var.isDeclaredWithVar = isDeclaredWithVar(type);
    if (!var.isDeclaredWithVar) {
        var.setTypeNode(createTypeNode(type));
    }
    if (initializer.isPresent()) {
        var.setInitialExpression(createExpression(initializer.get()));
    }
}
// Wraps a record variable in its statement-level definition node.
private BLangRecordVariableDef createRecordVariableDef(BLangVariable var) {
    BLangRecordVariableDef definition =
            (BLangRecordVariableDef) TreeBuilder.createRecordVariableDefinitionNode();
    definition.pos = var.pos;
    definition.setVariable(var);
    return definition;
}
// Wraps a tuple variable in its statement-level definition node.
private BLangTupleVariableDef createTupleVariableDef(BLangVariable tupleVar) {
    BLangTupleVariableDef definition =
            (BLangTupleVariableDef) TreeBuilder.createTupleVariableDefinitionNode();
    definition.pos = tupleVar.pos;
    definition.setVariable(tupleVar);
    return definition;
}
// Wraps an error variable in its statement-level definition node.
private BLangErrorVariableDef createErrorVariableDef(BLangVariable errorVar) {
    BLangErrorVariableDef definition =
            (BLangErrorVariableDef) TreeBuilder.createErrorVariableDefinitionNode();
    definition.pos = errorVar.pos;
    definition.setVariable(errorVar);
    return definition;
}
@Override
public BLangNode transform(ExpressionStatementNode expressionStatement) {
    // An async-send action statement is transformed directly; every other expression
    // becomes a plain expression statement.
    if (expressionStatement.expression().kind() == SyntaxKind.ASYNC_SEND_ACTION) {
        return expressionStatement.expression().apply(this);
    }
    BLangExpressionStmt exprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode();
    exprStmt.expr = createExpression(expressionStatement.expression());
    exprStmt.pos = getPosition(expressionStatement);
    return exprStmt;
}
@Override
public BLangNode transform(AsyncSendActionNode asyncSendActionNode) {
    // Maps `expr -> worker` (async send) to a worker-send node.
    BLangWorkerSend workerSend = (BLangWorkerSend) TreeBuilder.createWorkerSendNode();
    workerSend.setWorkerName(createIdentifier(getPosition(asyncSendActionNode.peerWorker()),
            asyncSendActionNode.peerWorker().name()));
    workerSend.expr = createExpression(asyncSendActionNode.expression());
    workerSend.pos = getPosition(asyncSendActionNode);
    return workerSend;
}
@Override
public BLangNode transform(WaitActionNode waitActionNode) {
    // `wait {a: f1, b: f2}` is a wait-for-all; anything else is a single-future wait.
    Node waitFutureExpr = waitActionNode.waitFutureExpr();
    if (waitFutureExpr.kind() == SyntaxKind.WAIT_FIELDS_LIST) {
        return getWaitForAllExpr((WaitFieldsListNode) waitFutureExpr);
    }
    BLangWaitExpr waitExpr = TreeBuilder.createWaitExpressionNode();
    waitExpr.pos = getPosition(waitActionNode);
    waitExpr.exprList = Collections.singletonList(createExpression(waitFutureExpr));
    return waitExpr;
}
// Builds a wait-for-all expression from each key/future pair in the field list.
private BLangWaitForAllExpr getWaitForAllExpr(WaitFieldsListNode waitFields) {
    BLangWaitForAllExpr waitForAllExpr = TreeBuilder.createWaitForAllExpressionNode();
    List<BLangWaitKeyValue> keyValuePairs = new ArrayList<>();
    for (Node waitField : waitFields.waitFields()) {
        keyValuePairs.add(getWaitForAllExpr(waitField));
    }
    waitForAllExpr.keyValuePairs = keyValuePairs;
    waitForAllExpr.pos = getPosition(waitFields);
    return waitForAllExpr;
}
// Builds a single key-value entry of a wait-for-all expression.
private BLangWaitKeyValue getWaitForAllExpr(Node waitFields) {
    BLangWaitForAllExpr.BLangWaitKeyValue keyValue = TreeBuilder.createWaitKeyValueNode();
    keyValue.pos = getPosition(waitFields);
    if (waitFields.kind() == SyntaxKind.WAIT_FIELD) {
        // Explicit `key: futureExpr` field.
        WaitFieldNode waitFieldNode = (WaitFieldNode) waitFields;
        BLangIdentifier key = createIdentifier(waitFieldNode.fieldName().name());
        key.setLiteral(false);
        keyValue.key = key;
        keyValue.valueExpr = createExpression(waitFieldNode.waitFutureExpr());
        return keyValue;
    }
    // Shorthand field: a bare variable name serves as both the key and the future ref.
    SimpleNameReferenceNode varName = (SimpleNameReferenceNode) waitFields;
    BLangIdentifier key = createIdentifier(varName.name());
    key.setLiteral(false);
    keyValue.key = key;
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(varName);
    varRef.variableName = key;
    varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // NOTE(review): the shorthand form populates keyExpr (not valueExpr) — confirm that
    // downstream code expects this asymmetry with the explicit-field branch.
    keyValue.keyExpr = varRef;
    return keyValue;
}
@Override
public BLangNode transform(StartActionNode startActionNode) {
    // Transforms `start expr` into an async (action) invocation.
    BLangNode expression = createActionOrExpression(startActionNode.expression());
    BLangInvocation invocation;
    // The transformed expression may come back wrapped in a worker-send node; unwrap it
    // so both locals refer to the underlying invocation.
    if (!(expression instanceof BLangWorkerSend)) {
        invocation = (BLangInvocation) expression;
    } else {
        invocation = (BLangInvocation) ((BLangWorkerSend) expression).expr;
        expression = ((BLangWorkerSend) expression).expr;
    }
    if (expression.getKind() == NodeKind.INVOCATION) {
        // Promote a plain invocation to an action invocation, copying its parts and
        // re-anchoring the position at the whole `start` action.
        BLangActionInvocation actionInvocation = (BLangActionInvocation) TreeBuilder.createActionInvocation();
        actionInvocation.expr = invocation.expr;
        actionInvocation.pkgAlias = invocation.pkgAlias;
        actionInvocation.name = invocation.name;
        actionInvocation.argExprs = invocation.argExprs;
        actionInvocation.flagSet = invocation.flagSet;
        actionInvocation.pos = getPosition(startActionNode);
        invocation = actionInvocation;
    }
    invocation.async = true;
    invocation.annAttachments = applyAll(startActionNode.annotations());
    return invocation;
}
@Override
public BLangNode transform(TransactionStatementNode transactionStatementNode) {
    // Maps `transaction { ... } [on fail ...]` to a BLangTransaction node.
    BLangTransaction transactionNode = (BLangTransaction) TreeBuilder.createTransactionNode();
    BLangBlockStmt body = (BLangBlockStmt) transactionStatementNode.blockStatement().apply(this);
    body.pos = getPosition(transactionStatementNode.blockStatement());
    transactionNode.setTransactionBody(body);
    transactionNode.pos = getPosition(transactionStatementNode);
    transactionStatementNode.onFailClause().ifPresent(onFailClauseNode -> transactionNode.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClauseNode.apply(this)));
    return transactionNode;
}
@Override
public BLangNode transform(PositionalArgumentNode argumentNode) {
    // A positional argument is just its underlying expression.
    return createExpression(argumentNode.expression());
}
@Override
public BLangNode transform(NamedArgumentNode namedArgumentNode) {
    // Maps a `name = expr` argument to a named-arg expression node.
    BLangNamedArgsExpression namedArgExpr = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArgExpr.pos = getPosition(namedArgumentNode);
    namedArgExpr.name = this.createIdentifier(namedArgumentNode.argumentName().name());
    namedArgExpr.expr = createExpression(namedArgumentNode.expression());
    return namedArgExpr;
}
@Override
public BLangNode transform(RestArgumentNode restArgumentNode) {
    // Maps `...expr` to a var-args (spread) expression, positioned at the ellipsis token.
    BLangRestArgsExpression restArgs = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    restArgs.pos = getPosition(restArgumentNode.ellipsis());
    restArgs.expr = createExpression(restArgumentNode.expression());
    return restArgs;
}
@Override
public BLangNode transform(RequiredParameterNode requiredParameter) {
    // Maps a required function parameter to a simple variable flagged REQUIRED_PARAM.
    BLangSimpleVariable param = createSimpleVar(requiredParameter.paramName(),
            requiredParameter.typeName(), requiredParameter.annotations());
    param.pos = getPosition(requiredParameter);
    requiredParameter.paramName().ifPresent(name -> param.name.pos = getPosition(name));
    param.flagSet.add(Flag.REQUIRED_PARAM);
    return param;
}
@Override
public BLangNode transform(IncludedRecordParameterNode includedRecordParameterNode) {
    // Maps an included-record parameter (`*RecordType name`) to a simple variable
    // flagged INCLUDED.
    BLangSimpleVariable simpleVar = createSimpleVar(includedRecordParameterNode.paramName(),
            includedRecordParameterNode.typeName(), includedRecordParameterNode.annotations());
    simpleVar.flagSet.add(INCLUDED);
    simpleVar.pos = getPosition(includedRecordParameterNode);
    if (includedRecordParameterNode.paramName().isPresent()) {
        simpleVar.name.pos = getPosition(includedRecordParameterNode.paramName().get());
    }
    // Trim the leading type portion off the variable's position after the full position
    // was set above.
    simpleVar.pos = trimLeft(simpleVar.pos, getPosition(includedRecordParameterNode.typeName()));
    return simpleVar;
}
@Override
public BLangNode transform(DefaultableParameterNode defaultableParameter) {
    // Maps a `T name = default` parameter, keeping the default expression.
    BLangSimpleVariable param = createSimpleVar(defaultableParameter.paramName(),
            defaultableParameter.typeName(),
            defaultableParameter.annotations());
    param.setInitialExpression(createExpression(defaultableParameter.expression()));
    param.flagSet.add(Flag.DEFAULTABLE_PARAM);
    param.pos = getPosition(defaultableParameter);
    return param;
}
@Override
public BLangNode transform(RestParameterNode restParameter) {
    // Maps `T... name` to a simple variable whose declared type is rewrapped in a
    // one-dimensional array type, flagged REST_PARAM.
    BLangSimpleVariable bLSimpleVar = createSimpleVar(restParameter.paramName(), restParameter.typeName(),
            restParameter.annotations());
    BLangArrayType bLArrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    // The element type is the parameter's original type node; the array replaces it.
    bLArrayType.elemtype = bLSimpleVar.typeNode;
    bLArrayType.dimensions = 1;
    bLSimpleVar.typeNode = bLArrayType;
    bLArrayType.pos = getPosition(restParameter.typeName());
    bLSimpleVar.flagSet.add(Flag.REST_PARAM);
    bLSimpleVar.pos = getPosition(restParameter);
    return bLSimpleVar;
}
@Override
public BLangNode transform(OptionalTypeDescriptorNode optTypeDescriptor) {
    // `T?` desugars to the union `T | ()`.
    BLangValueType nilType = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilType.pos = getPosition(optTypeDescriptor.questionMarkToken());
    nilType.typeKind = TypeKind.NIL;
    BLangUnionTypeNode unionType = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    unionType.memberTypeNodes.add(createTypeNode(optTypeDescriptor.typeDescriptor()));
    unionType.memberTypeNodes.add(nilType);
    unionType.nullable = true;
    unionType.pos = getPosition(optTypeDescriptor);
    return unionType;
}
@Override
public BLangNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) {
    // Maps a function type descriptor: `function (params) [returns T]` with a full
    // signature, or the bare `function` type (ANY_FUNCTION).
    BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) TreeBuilder.createFunctionTypeNode();
    functionTypeNode.pos = getPosition(functionTypeDescriptorNode);
    // NOTE(review): set to true even when no `returns` clause is written — confirm this
    // is what downstream consumers expect.
    functionTypeNode.returnsKeywordExists = true;
    if (functionTypeDescriptorNode.functionSignature().isPresent()) {
        FunctionSignatureNode funcSignature = functionTypeDescriptorNode.functionSignature().get();
        // Split parameters into the rest parameter and ordinary parameters.
        for (ParameterNode child : funcSignature.parameters()) {
            SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
            if (child.kind() == SyntaxKind.REST_PARAM) {
                functionTypeNode.restParam = (BLangSimpleVariable) param;
            } else {
                functionTypeNode.params.add((BLangVariable) param);
            }
        }
        Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
        if (retNode.isPresent()) {
            ReturnTypeDescriptorNode returnType = retNode.get();
            functionTypeNode.returnTypeNode = createTypeNode(returnType.type());
        } else {
            // No `returns` clause: the return type defaults to nil.
            BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            bLValueType.pos = getPosition(funcSignature);
            bLValueType.typeKind = TypeKind.NIL;
            functionTypeNode.returnTypeNode = bLValueType;
        }
    } else {
        // Bare `function` with no signature: any function type.
        functionTypeNode.flagSet.add(Flag.ANY_FUNCTION);
    }
    functionTypeNode.flagSet.add(Flag.PUBLIC);
    // Apply `isolated` / `transactional` qualifiers from the descriptor.
    for (Token token : functionTypeDescriptorNode.qualifierList()) {
        if (token.kind() == SyntaxKind.ISOLATED_KEYWORD) {
            functionTypeNode.flagSet.add(Flag.ISOLATED);
        } else if (token.kind() == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            functionTypeNode.flagSet.add(Flag.TRANSACTIONAL);
        }
    }
    return functionTypeNode;
}
@Override
public BLangNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) {
    // Maps parameterized built-in types such as `map<T>` / `future<T>` to a constrained type.
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    BLangBuiltInRefTypeNode baseType =
            (BLangBuiltInRefTypeNode) createBuiltInTypeNode(parameterizedTypeDescNode.parameterizedType());
    refType.typeKind = baseType.typeKind;
    refType.pos = baseType.pos;
    BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
    constrainedType.type = refType;
    constrainedType.constraint = createTypeNode(parameterizedTypeDescNode.typeParameter().typeNode());
    constrainedType.pos = getPosition(parameterizedTypeDescNode);
    return constrainedType;
}
@Override
public BLangNode transform(KeySpecifierNode keySpecifierNode) {
    // Maps a table `key(f1, f2, ...)` clause to a key-specifier node.
    BLangTableKeySpecifier keySpecifier =
            (BLangTableKeySpecifier) TreeBuilder.createTableKeySpecifierNode();
    keySpecifier.pos = getPosition(keySpecifierNode);
    for (Token fieldName : keySpecifierNode.fieldNames()) {
        keySpecifier.addFieldNameIdentifier(createIdentifier(fieldName));
    }
    return keySpecifier;
}
@Override
public BLangNode transform(KeyTypeConstraintNode keyTypeConstraintNode) {
    // Maps a table `key<T>` clause to a key-type constraint node.
    BLangTableKeyTypeConstraint constraint = new BLangTableKeyTypeConstraint();
    constraint.pos = getPosition(keyTypeConstraintNode);
    constraint.keyType = createTypeNode(keyTypeConstraintNode.typeParameterNode());
    return constraint;
}
@Override
public BLangNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) {
    // Maps `table<RowType> [key(...) | key<T>]` to a BLangTableTypeNode.
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TreeUtils.stringToTypeKind(tableTypeDescriptorNode.tableKeywordToken().text());
    refType.pos = getPosition(tableTypeDescriptorNode);
    BLangTableTypeNode tableTypeNode = (BLangTableTypeNode) TreeBuilder.createTableTypeNode();
    tableTypeNode.pos = getPosition(tableTypeDescriptorNode);
    tableTypeNode.type = refType;
    tableTypeNode.constraint = createTypeNode(tableTypeDescriptorNode.rowTypeParameterNode());
    // The key constraint is either `key<T>` (type constraint) or `key(f1, f2)` (specifier).
    if (tableTypeDescriptorNode.keyConstraintNode().isPresent()) {
        Node constraintNode = tableTypeDescriptorNode.keyConstraintNode().get();
        if (constraintNode.kind() == SyntaxKind.KEY_TYPE_CONSTRAINT) {
            tableTypeNode.tableKeyTypeConstraint = (BLangTableKeyTypeConstraint) constraintNode.apply(this);
        } else if (constraintNode.kind() == SyntaxKind.KEY_SPECIFIER) {
            tableTypeNode.tableKeySpecifier = (BLangTableKeySpecifier) constraintNode.apply(this);
        }
    }
    tableTypeNode.isTypeInlineDefined = checkIfAnonymous(tableTypeDescriptorNode);
    return tableTypeNode;
}
@Override
public BLangNode transform(SimpleNameReferenceNode simpleNameRefNode) {
    // A bare name in type position is a user-defined type with an empty package alias.
    BLangUserDefinedType userDefinedType = new BLangUserDefinedType();
    userDefinedType.pos = getPosition(simpleNameRefNode);
    userDefinedType.typeName = createIdentifier(simpleNameRefNode.name());
    userDefinedType.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    return userDefinedType;
}
@Override
public BLangNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
    // Maps `module:name` to a simple var-ref carrying the module prefix as pkgAlias.
    BLangSimpleVarRef nameRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    nameRef.pos = getPosition(qualifiedNameReferenceNode);
    nameRef.variableName = createIdentifier(qualifiedNameReferenceNode.identifier());
    nameRef.pkgAlias = createIdentifier(qualifiedNameReferenceNode.modulePrefix());
    return nameRef;
}
@Override
public BLangNode transform(XMLProcessingInstruction xmlProcessingInstruction) {
    // Maps `<?target data?>`; empty data still contributes one empty literal fragment.
    BLangXMLProcInsLiteral xmlProcInsLiteral =
            (BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode();
    if (xmlProcessingInstruction.data().isEmpty()) {
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = getPosition(xmlProcessingInstruction);
        xmlProcInsLiteral.dataFragments.add(emptyLiteral);
    } else {
        for (Node dataNode : xmlProcessingInstruction.data()) {
            xmlProcInsLiteral.dataFragments.add(createExpression(dataNode));
        }
    }
    // The PI target may be a simple or qualified XML name; for a qualified name only the
    // prefix is used as the target literal.
    XMLNameNode target = xmlProcessingInstruction.target();
    if (target.kind() == SyntaxKind.XML_SIMPLE_NAME) {
        xmlProcInsLiteral.target = createSimpleLiteral(((XMLSimpleNameNode) target).name());
    } else {
        xmlProcInsLiteral.target = createSimpleLiteral(((XMLQualifiedNameNode) target).prefix());
    }
    xmlProcInsLiteral.pos = getPosition(xmlProcessingInstruction);
    return xmlProcInsLiteral;
}
@Override
public BLangNode transform(XMLComment xmlComment) {
    // Maps `<!-- ... -->`; an empty comment still carries one empty literal fragment.
    BLangXMLCommentLiteral commentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode();
    Location pos = getPosition(xmlComment);
    if (xmlComment.content().isEmpty()) {
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = pos;
        commentLiteral.textFragments.add(emptyLiteral);
    } else {
        for (Node contentNode : xmlComment.content()) {
            commentLiteral.textFragments.add(createExpression(contentNode));
        }
    }
    commentLiteral.pos = pos;
    return commentLiteral;
}
@Override
public BLangNode transform(XMLElementNode xmlElementNode) {
    // Maps `<tag attrs>content</tag>` to an XML element literal.
    BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    xmlElement.startTagName = createExpression(xmlElementNode.startTag());
    xmlElement.endTagName = createExpression(xmlElementNode.endTag());
    // Text children become simple literals; all other children are transformed as
    // expressions.
    for (Node node : xmlElementNode.content()) {
        if (node.kind() == SyntaxKind.XML_TEXT) {
            xmlElement.children.add(createSimpleLiteral(((XMLTextNode) node).content()));
            continue;
        }
        xmlElement.children.add(createExpression(node));
    }
    for (XMLAttributeNode attribute : xmlElementNode.startTag().attributes()) {
        xmlElement.attributes.add((BLangXMLAttribute) attribute.apply(this));
    }
    xmlElement.pos = getPosition(xmlElementNode);
    // NOTE(review): isRoot is set unconditionally here — confirm nested elements are
    // also expected to carry this flag at this stage.
    xmlElement.isRoot = true;
    return xmlElement;
}
@Override
public BLangNode transform(XMLAttributeNode xmlAttributeNode) {
    // Maps an XML attribute; the value is transformed before the name, preserving the
    // original evaluation order.
    BLangXMLAttribute attribute = (BLangXMLAttribute) TreeBuilder.createXMLAttributeNode();
    attribute.value = (BLangXMLQuotedString) xmlAttributeNode.value().apply(this);
    attribute.name = createExpression(xmlAttributeNode.attributeName());
    attribute.pos = getPosition(xmlAttributeNode);
    return attribute;
}
@Override
public BLangNode transform(ByteArrayLiteralNode byteArrayLiteralNode) {
    // Maps `base16`/`base64` byte-array literals to a literal node typed as byte[].
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.pos = getPosition(byteArrayLiteralNode);
    literal.type = symTable.getTypeFromTag(TypeTags.BYTE_ARRAY);
    // NOTE(review): this mutates the tag of the type instance returned by the symbol
    // table — confirm getTypeFromTag returns a fresh/dedicated instance here.
    literal.type.tag = TypeTags.BYTE_ARRAY;
    literal.value = getValueFromByteArrayNode(byteArrayLiteralNode);
    literal.originalValue = String.valueOf(literal.value);
    return literal;
}
@Override
public BLangNode transform(XMLAttributeValue xmlAttributeValue) {
    // Maps a single- or double-quoted attribute value to an XML quoted string.
    BLangXMLQuotedString quotedString = (BLangXMLQuotedString) TreeBuilder.createXMLQuotedStringNode();
    quotedString.pos = getPosition(xmlAttributeValue);
    if (xmlAttributeValue.startQuote().kind() == SyntaxKind.SINGLE_QUOTE_TOKEN) {
        quotedString.quoteType = QuoteType.SINGLE_QUOTE;
    } else {
        quotedString.quoteType = QuoteType.DOUBLE_QUOTE;
    }
    if (xmlAttributeValue.value().isEmpty()) {
        // Empty value: a single empty literal fragment.
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = getPosition(xmlAttributeValue);
        quotedString.textFragments.add(emptyLiteral);
    } else if (xmlAttributeValue.value().size() == 1 &&
            xmlAttributeValue.value().get(0).kind() == SyntaxKind.INTERPOLATION) {
        // A lone interpolation gets a trailing empty literal appended after it.
        quotedString.textFragments.add(createExpression(xmlAttributeValue.value().get(0)));
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = getPosition(xmlAttributeValue);
        quotedString.textFragments.add(emptyLiteral);
    } else {
        // Mixed content: unescape raw XML text fragments, transform everything else.
        for (Node value : xmlAttributeValue.value()) {
            if (value.kind() == SyntaxKind.XML_TEXT_CONTENT) {
                Token token = (Token) value;
                String normalizedValue = XmlFactory.XMLTextUnescape.unescape(token.text());
                quotedString.textFragments.add(createStringLiteral(normalizedValue, getPosition(value)));
            } else {
                quotedString.textFragments.add(createExpression(value));
            }
        }
    }
    return quotedString;
}
@Override
public BLangNode transform(XMLStartTagNode startTagNode) {
    // A start tag contributes only its (possibly qualified) name.
    return startTagNode.name().apply(this);
}
@Override
public BLangNode transform(XMLEndTagNode endTagNode) {
    // An end tag contributes only its (possibly qualified) name.
    return endTagNode.name().apply(this);
}
@Override
public BLangNode transform(XMLTextNode xmlTextNode) {
    // XML text maps directly to an expression over its content token.
    return createExpression(xmlTextNode.content());
}
// Produces an XML text literal holding a single empty-string fragment.
private BLangNode createXMLEmptyLiteral(TemplateExpressionNode expressionNode) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.pos = getPosition(expressionNode);
    textLiteral.textFragments.add(createEmptyStringLiteral(textLiteral.pos));
    return textLiteral;
}
// Builds an XML text literal from several fragments, positioned at the first one;
// a trailing empty-string fragment is always appended.
private BLangNode createXMLTextLiteral(List<Node> expressionNode) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.pos = getPosition(expressionNode.get(0));
    for (Node fragment : expressionNode) {
        textLiteral.textFragments.add(createExpression(fragment));
    }
    textLiteral.textFragments.add(createEmptyStringLiteral(textLiteral.pos));
    return textLiteral;
}
// Builds an XML text literal from a single fragment (no trailing empty fragment).
private BLangNode createXMLTextLiteral(Node expressionNode) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.pos = getPosition(expressionNode);
    textLiteral.textFragments.add(createExpression(expressionNode));
    return textLiteral;
}
@Override
public BLangNode transform(XMLNamespaceDeclarationNode xmlnsDeclNode) {
    // A local `xmlns` declaration becomes an XMLNS node wrapped in a statement node.
    BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    xmlns.prefix = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null));
    xmlns.namespaceURI = createExpression(xmlnsDeclNode.namespaceuri());
    xmlns.pos = getPosition(xmlnsDeclNode);
    BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    xmlnsStmt.xmlnsDecl = xmlns;
    xmlnsStmt.pos = getPosition(xmlnsDeclNode);
    return xmlnsStmt;
}
@Override
public BLangNode transform(ModuleXMLNamespaceDeclarationNode xmlnsDeclNode) {
    // A module-level `xmlns` declaration maps directly to an XMLNS node (no statement wrapper).
    BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    xmlns.prefix = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null));
    xmlns.namespaceURI = createExpression(xmlnsDeclNode.namespaceuri());
    xmlns.pos = getPosition(xmlnsDeclNode);
    return xmlns;
}
@Override
public BLangNode transform(XMLQualifiedNameNode xmlQualifiedNameNode) {
    // Maps a `prefix:name` XML name to a qualified XML name node.
    BLangXMLQName qName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    qName.localname = createIdentifier(getPosition(xmlQualifiedNameNode.name()),
            xmlQualifiedNameNode.name().name());
    qName.prefix = createIdentifier(getPosition(xmlQualifiedNameNode.prefix()),
            xmlQualifiedNameNode.prefix().name());
    qName.pos = getPosition(xmlQualifiedNameNode);
    return qName;
}
@Override
public BLangNode transform(XMLSimpleNameNode xmlSimpleNameNode) {
    // Maps an unprefixed XML name; the prefix is an empty identifier.
    BLangXMLQName qName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    qName.localname = createIdentifier(xmlSimpleNameNode.name());
    qName.prefix = createIdentifier(null, "");
    qName.pos = getPosition(xmlSimpleNameNode);
    return qName;
}
@Override
public BLangNode transform(XMLEmptyElementNode xMLEmptyElementNode) {
    // Maps a self-closing element `<name .../>`: a start-tag name plus attributes only.
    BLangXMLElementLiteral emptyElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    emptyElement.startTagName = createExpression(xMLEmptyElementNode.name());
    for (XMLAttributeNode attribute : xMLEmptyElementNode.attributes()) {
        emptyElement.attributes.add((BLangXMLAttribute) attribute.apply(this));
    }
    emptyElement.pos = getPosition(xMLEmptyElementNode);
    return emptyElement;
}
@Override
public BLangNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) {
    // Maps `expr->method(args)` to an action invocation.
    BLangInvocation.BLangActionInvocation actionInvocation =
            (BLangInvocation.BLangActionInvocation) TreeBuilder.createActionInvocation();
    actionInvocation.expr = createExpression(remoteMethodCallActionNode.expression());
    actionInvocation.argExprs = applyAll(remoteMethodCallActionNode.arguments());
    BLangNameReference nameReference = createBLangNameReference(remoteMethodCallActionNode.methodName().name());
    actionInvocation.name = (BLangIdentifier) nameReference.name;
    actionInvocation.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    actionInvocation.pos = getPosition(remoteMethodCallActionNode);
    return actionInvocation;
}
@Override
public BLangNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) {
    // Maps `stream`, `stream<T>` or `stream<T, E>` to a BLangStreamType.
    BLangType constraint, error = null;
    Location pos = getPosition(streamTypeDescriptorNode);
    Optional<Node> paramsNode = streamTypeDescriptorNode.streamTypeParamsNode();
    boolean hasConstraint = paramsNode.isPresent();
    if (!hasConstraint) {
        // Bare `stream` defaults its constraint to `any`.
        constraint = addValueType(pos, TypeKind.ANY);
    } else {
        StreamTypeParamsNode params = (StreamTypeParamsNode) paramsNode.get();
        // The optional right type parameter is the stream's completion/error type.
        if (params.rightTypeDescNode().isPresent()) {
            error = createTypeNode(params.rightTypeDescNode().get());
        }
        constraint = createTypeNode(params.leftTypeDescNode());
    }
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TypeKind.STREAM;
    refType.pos = pos;
    BLangStreamType streamType = (BLangStreamType) TreeBuilder.createStreamTypeNode();
    streamType.type = refType;
    streamType.constraint = constraint;
    streamType.error = error;
    streamType.pos = pos;
    return streamType;
}
@Override
public BLangNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) {
    // Flattens nested array type descriptors (e.g. `int[2][*][]`) into one BLangArrayType
    // carrying a size entry per dimension.
    int dimensions = 1;
    List<BLangExpression> sizes = new ArrayList<>();
    Location position = getPosition(arrayTypeDescriptorNode);
    while (true) {
        if (arrayTypeDescriptorNode.arrayLength().isEmpty()) {
            // `T[]` — open (unsized) dimension.
            sizes.add(new BLangLiteral(OPEN_ARRAY_INDICATOR, symTable.intType));
        } else {
            Node keyExpr = arrayTypeDescriptorNode.arrayLength().get();
            if (keyExpr.kind() == SyntaxKind.NUMERIC_LITERAL) {
                Token literalToken = ((BasicLiteralNode) keyExpr).literalToken();
                if (literalToken.kind() == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) {
                    sizes.add(new BLangLiteral(Integer.parseInt(literalToken.text()), symTable.intType));
                } else {
                    // Hex integer literal: the token text includes the `0x`/`0X` prefix,
                    // which Integer.parseInt(text, 16) would reject with a
                    // NumberFormatException — strip it before parsing.
                    sizes.add(new BLangLiteral(Integer.parseInt(literalToken.text().substring(2), 16),
                            symTable.intType));
                }
            } else if (keyExpr.kind() == SyntaxKind.ASTERISK_LITERAL) {
                // `T[*]` — size inferred from the initializer.
                sizes.add(new BLangLiteral(INFERRED_ARRAY_INDICATOR, symTable.intType));
            } else {
                // A reference expression used as the array length.
                sizes.add(createExpression(keyExpr));
            }
        }
        if (arrayTypeDescriptorNode.memberTypeDesc().kind() != SyntaxKind.ARRAY_TYPE_DESC) {
            break;
        }
        arrayTypeDescriptorNode = (ArrayTypeDescriptorNode) arrayTypeDescriptorNode.memberTypeDesc();
        dimensions++;
    }
    BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    arrayTypeNode.pos = position;
    arrayTypeNode.elemtype = createTypeNode(arrayTypeDescriptorNode.memberTypeDesc());
    arrayTypeNode.dimensions = dimensions;
    arrayTypeNode.sizes = sizes.toArray(new BLangExpression[0]);
    return arrayTypeNode;
}
/**
 * Transforms an {@code enum} declaration into a module-level type definition whose
 * type node is the union of the enum's members. Each named member is additionally
 * registered at module level as a constant via {@link #transformEnumMember}.
 */
public BLangNode transform(EnumDeclarationNode enumDeclarationNode) {
    // Primitive flag instead of the boxed Boolean — it is never null.
    boolean publicQualifier = false;
    if (enumDeclarationNode.qualifier().isPresent() && enumDeclarationNode.qualifier().get().kind()
            == SyntaxKind.PUBLIC_KEYWORD) {
        publicQualifier = true;
    }
    // Register each member as a constant; members with a missing identifier are skipped.
    for (Node member : enumDeclarationNode.enumMemberList()) {
        EnumMemberNode enumMember = (EnumMemberNode) member;
        if (enumMember.identifier().isMissing()) {
            continue;
        }
        addToTop(transformEnumMember(enumMember, publicQualifier));
    }

    BLangTypeDefinition bLangTypeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    if (publicQualifier) {
        bLangTypeDefinition.flagSet.add(Flag.PUBLIC);
    }
    bLangTypeDefinition.flagSet.add(Flag.ENUM);
    bLangTypeDefinition.setName((BLangIdentifier) transform(enumDeclarationNode.identifier()));
    bLangTypeDefinition.pos = getPosition(enumDeclarationNode);

    // The enum's type is the union of its member types.
    BLangUnionTypeNode bLangUnionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    bLangUnionTypeNode.pos = bLangTypeDefinition.pos;
    for (Node member : enumDeclarationNode.enumMemberList()) {
        Node enumMemberIdentifier = ((EnumMemberNode) member).identifier();
        if (enumMemberIdentifier.isMissing()) {
            continue;
        }
        bLangUnionTypeNode.memberTypeNodes.add(createTypeNode(enumMemberIdentifier));
    }
    // Members were appended in source order; the union expects them reversed.
    Collections.reverse(bLangUnionTypeNode.memberTypeNodes);
    bLangTypeDefinition.setTypeNode(bLangUnionTypeNode);
    bLangTypeDefinition.annAttachments = applyAll(getAnnotations(enumDeclarationNode.metadata()));
    bLangTypeDefinition.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(enumDeclarationNode.metadata()));
    return bLangTypeDefinition;
}
/**
 * Transforms a single enum member into a module-level constant of type string.
 * A member with an explicit const expression ({@code X = "v"}) uses that expression
 * as its value; otherwise the member's own identifier doubles as its value.
 *
 * @param member          the enum member node
 * @param publicQualifier whether the enclosing enum is declared public
 * @return the synthesized constant
 */
public BLangConstant transformEnumMember(EnumMemberNode member, Boolean publicQualifier) {
    BLangConstant bLangConstant = (BLangConstant) TreeBuilder.createConstantNode();
    bLangConstant.pos = getPosition(member);
    bLangConstant.flagSet.add(Flag.CONSTANT);
    bLangConstant.flagSet.add(Flag.ENUM_MEMBER);
    if (publicQualifier) {
        bLangConstant.flagSet.add(Flag.PUBLIC);
    }
    bLangConstant.annAttachments = applyAll(getAnnotations(member.metadata()));
    bLangConstant.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(member.metadata()));
    bLangConstant.setName((BLangIdentifier) transform(member.identifier()));

    // A second, independent copy of the value expression is kept for the associated
    // finite type so the two trees are not shared.
    BLangExpression deepLiteral;
    if (member.constExprNode().isPresent()) {
        // Presence checked above, so .get() is the idiomatic accessor here
        // (the previous .orElse(null) could never yield null on this path).
        Node constExprNode = member.constExprNode().get();
        bLangConstant.setInitialExpression(createExpression(constExprNode));
        deepLiteral = createExpression(constExprNode);
    } else {
        bLangConstant.setInitialExpression(createSimpleLiteral(member.identifier()));
        deepLiteral = createSimpleLiteral(member.identifier());
    }

    BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    typeNode.pos = symTable.builtinPos;
    typeNode.typeKind = TypeKind.STRING;
    bLangConstant.setTypeNode(typeNode);

    // Associate a finite type holding exactly this member's value. Literals whose
    // original text is empty get no associated type definition.
    if (deepLiteral instanceof BLangLiteral) {
        BLangLiteral literal = (BLangLiteral) deepLiteral;
        if (!literal.originalValue.isEmpty()) {
            BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
            literal.originalValue = null;
            typeNodeAssociated.addValue(deepLiteral);
            bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
        } else {
            bLangConstant.associatedTypeDefinition = null;
        }
    } else {
        BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        typeNodeAssociated.addValue(deepLiteral);
        bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
    }
    return bLangConstant;
}
@Override
public BLangNode transform(QueryExpressionNode queryExprNode) {
    // Assemble the clause list in source order: from, intermediates, select, on-conflict.
    BLangQueryExpr queryExpression = (BLangQueryExpr) TreeBuilder.createQueryExpressionNode();
    queryExpression.pos = getPosition(queryExprNode);
    queryExpression.queryClauseList.add(
            (BLangFromClause) queryExprNode.queryPipeline().fromClause().apply(this));
    for (Node intermediateClause : queryExprNode.queryPipeline().intermediateClauses()) {
        queryExpression.queryClauseList.add(intermediateClause.apply(this));
    }
    queryExpression.queryClauseList.add((BLangSelectClause) queryExprNode.selectClause().apply(this));
    queryExprNode.onConflictClause()
            .ifPresent(conflict -> queryExpression.queryClauseList.add(conflict.apply(this)));

    // Record what the query constructs (`table ...`, `stream ...`) plus any table key fields.
    boolean constructsTable = false;
    boolean constructsStream = false;
    Optional<QueryConstructTypeNode> constructType = queryExprNode.queryConstructType();
    if (constructType.isPresent()) {
        QueryConstructTypeNode constructTypeNode = constructType.get();
        SyntaxKind keywordKind = constructTypeNode.keyword().kind();
        constructsTable = keywordKind == SyntaxKind.TABLE_KEYWORD;
        constructsStream = keywordKind == SyntaxKind.STREAM_KEYWORD;
        if (constructTypeNode.keySpecifier().isPresent()) {
            for (IdentifierToken fieldName : constructTypeNode.keySpecifier().get().fieldNames()) {
                queryExpression.fieldNameIdentifierList.add(
                        createIdentifier(getPosition(fieldName), fieldName));
            }
        }
    }
    queryExpression.isStream = constructsStream;
    queryExpression.isTable = constructsTable;
    return queryExpression;
}
public BLangNode transform(OnFailClauseNode onFailClauseNode) {
    // `on fail <type> <name> { ... }` — build the error binding variable first,
    // then wrap it and the block into the clause node.
    boolean usesVar = onFailClauseNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;

    BLangSimpleVariable errorVariable = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    errorVariable.isDeclaredWithVar = usesVar;
    if (!usesVar) {
        errorVariable.setTypeNode(createTypeNode(onFailClauseNode.typeDescriptor()));
    }
    errorVariable.pos = getPosition(onFailClauseNode);
    errorVariable.setName(this.createIdentifier(onFailClauseNode.failErrorName()));
    errorVariable.name.pos = getPosition(onFailClauseNode.failErrorName());

    BLangSimpleVariableDef errorVarDef =
            (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    errorVarDef.setVariable(errorVariable);
    errorVarDef.pos = errorVariable.name.pos;

    BLangOnFailClause onFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    onFailClause.pos = getPosition(onFailClauseNode);
    onFailClause.isDeclaredWithVar = usesVar;
    // The error binding is implicitly final.
    markVariableWithFlag(errorVarDef.getVariable(), Flag.FINAL);
    onFailClause.variableDefinitionNode = errorVarDef;

    BLangBlockStmt body = (BLangBlockStmt) transform(onFailClauseNode.blockStatement());
    body.pos = getPosition(onFailClauseNode);
    onFailClause.body = body;
    return onFailClause;
}
@Override
public BLangNode transform(LetClauseNode letClauseNode) {
    // `let T x = e, ...` inside a query pipeline; each binding is implicitly final.
    BLangLetClause letClause = (BLangLetClause) TreeBuilder.createLetClauseNode();
    letClause.pos = getPosition(letClauseNode);
    List<BLangLetVariable> declaredVars = new ArrayList<>();
    for (LetVariableDeclarationNode declaration : letClauseNode.letVarDeclarations()) {
        BLangLetVariable letVariable = createLetVariable(declaration);
        letVariable.definitionNode.getVariable().addFlag(Flag.FINAL);
        declaredVars.add(letVariable);
    }
    // Keep the node's default letVarDeclarations when nothing was declared.
    if (!declaredVars.isEmpty()) {
        letClause.letVarDeclarations = declaredVars;
    }
    return letClause;
}
@Override
public BLangNode transform(FromClauseNode fromClauseNode) {
    // `from <binding> in <collection>` — the head of a query pipeline.
    BLangFromClause fromClause = (BLangFromClause) TreeBuilder.createFromClauseNode();
    fromClause.pos = getPosition(fromClauseNode);
    fromClause.collection = createExpression(fromClauseNode.expression());
    TypedBindingPatternNode bindingPattern = fromClauseNode.typedBindingPattern();
    fromClause.variableDefinitionNode = createBLangVarDef(getPosition(bindingPattern), bindingPattern,
            Optional.empty(), Optional.empty());
    fromClause.isDeclaredWithVar = bindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    return fromClause;
}
@Override
public BLangNode transform(WhereClauseNode whereClauseNode) {
    // `where <expr>` — a filter over the query pipeline.
    BLangWhereClause filterClause = (BLangWhereClause) TreeBuilder.createWhereClauseNode();
    filterClause.pos = getPosition(whereClauseNode);
    filterClause.expression = createExpression(whereClauseNode.expression());
    return filterClause;
}
@Override
public BLangNode transform(SelectClauseNode selectClauseNode) {
    // `select <expr>` — the projection of the query pipeline.
    BLangSelectClause projection = (BLangSelectClause) TreeBuilder.createSelectClauseNode();
    projection.pos = getPosition(selectClauseNode);
    projection.expression = createExpression(selectClauseNode.expression());
    return projection;
}
@Override
public BLangNode transform(OnConflictClauseNode onConflictClauseNode) {
    // `on conflict <expr>` — handler expression for key collisions in the query result.
    BLangOnConflictClause conflictClause = (BLangOnConflictClause) TreeBuilder.createOnConflictClauseNode();
    conflictClause.pos = getPosition(onConflictClauseNode);
    conflictClause.expression = createExpression(onConflictClauseNode.expression());
    return conflictClause;
}
/**
 * Transforms a {@code limit} clause of a query into its BLang counterpart.
 * (Local renamed from the copy-pasted {@code selectClause} — this is a limit clause.)
 */
@Override
public BLangNode transform(LimitClauseNode limitClauseNode) {
    BLangLimitClause limitClause = (BLangLimitClause) TreeBuilder.createLimitClauseNode();
    limitClause.pos = getPosition(limitClauseNode);
    limitClause.expression = createExpression(limitClauseNode.expression());
    return limitClause;
}
@Override
public BLangNode transform(OnClauseNode onClauseNode) {
    // `on <lhs> equals <rhs>` — a join condition visited on its own.
    BLangOnClause joinCondition = (BLangOnClause) TreeBuilder.createOnClauseNode();
    joinCondition.pos = getPosition(onClauseNode);
    joinCondition.lhsExpr = createExpression(onClauseNode.lhsExpression());
    joinCondition.rhsExpr = createExpression(onClauseNode.rhsExpression());
    return joinCondition;
}
@Override
public BLangNode transform(JoinClauseNode joinClauseNode) {
    // `[outer] join <binding> in <collection> on <lhs> equals <rhs>`
    BLangJoinClause joinClause = (BLangJoinClause) TreeBuilder.createJoinClauseNode();
    joinClause.pos = getPosition(joinClauseNode);
    TypedBindingPatternNode bindingPattern = joinClauseNode.typedBindingPattern();
    joinClause.variableDefinitionNode = createBLangVarDef(getPosition(joinClauseNode),
            bindingPattern, Optional.empty(), Optional.empty());
    joinClause.collection = createExpression(joinClauseNode.expression());
    joinClause.isDeclaredWithVar = bindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    joinClause.isOuterJoin = joinClauseNode.outerKeyword().isPresent();

    // The `on` condition is built inline rather than via its transform overload.
    OnClauseNode condition = joinClauseNode.joinOnCondition();
    BLangOnClause onClause = (BLangOnClause) TreeBuilder.createOnClauseNode();
    onClause.pos = getPosition(condition);
    if (!condition.equalsKeyword().isMissing()) {
        onClause.equalsKeywordPos = getPosition(condition.equalsKeyword());
    }
    onClause.lhsExpr = createExpression(condition.lhsExpression());
    onClause.rhsExpr = createExpression(condition.rhsExpression());
    joinClause.onClause = onClause;
    return joinClause;
}
@Override
public BLangNode transform(OrderByClauseNode orderByClauseNode) {
    // `order by k1 [ascending|descending], k2 ...`
    BLangOrderByClause orderByClause = (BLangOrderByClause) TreeBuilder.createOrderByClauseNode();
    orderByClause.pos = getPosition(orderByClauseNode);
    for (OrderKeyNode key : orderByClauseNode.orderKey()) {
        orderByClause.addOrderKey(createOrderKey(key));
    }
    return orderByClause;
}
public BLangOrderKey createOrderKey(OrderKeyNode orderKeyNode) {
    // A single order key: expression plus direction (ascending unless an explicit
    // `descending` keyword is present).
    BLangOrderKey orderKey = (BLangOrderKey) TreeBuilder.createOrderKeyNode();
    orderKey.pos = getPosition(orderKeyNode);
    orderKey.expression = createExpression(orderKeyNode.expression());
    boolean descending = orderKeyNode.orderDirection().isPresent()
            && orderKeyNode.orderDirection().get().text().equals("descending");
    orderKey.isAscending = !descending;
    return orderKey;
}
@Override
public BLangNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) {
    // `A & B & C` — fold new constituents into an existing intersection node when one
    // side already is one, preserving left-to-right constituent order. The right side
    // is checked first, matching how nested intersections arrive from the parser.
    BLangType left = (BLangType) createTypeNode(intersectionTypeDescriptorNode.leftTypeDesc());
    BLangType right = (BLangType) createTypeNode(intersectionTypeDescriptorNode.rightTypeDesc());
    BLangIntersectionTypeNode intersection;
    if (right.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
        // Prepend the left type to the already-built intersection on the right.
        intersection = (BLangIntersectionTypeNode) right;
        intersection.constituentTypeNodes.add(0, left);
    } else if (left.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
        intersection = (BLangIntersectionTypeNode) left;
        intersection.constituentTypeNodes.add(right);
    } else {
        intersection = (BLangIntersectionTypeNode) TreeBuilder.createIntersectionTypeNode();
        intersection.constituentTypeNodes.add(left);
        intersection.constituentTypeNodes.add(right);
    }
    intersection.pos = getPosition(intersectionTypeDescriptorNode);
    return intersection;
}
@Override
public BLangNode transform(InferredTypedescDefaultNode inferDefaultValueNode) {
    // `<>` — a typedesc default whose value is inferred from the calling context.
    BLangInferredTypedescDefaultNode inferredDefault =
            (BLangInferredTypedescDefaultNode) TreeBuilder.createInferTypedescExpressionNode();
    inferredDefault.pos = getPosition(inferDefaultValueNode);
    return inferredDefault;
}
@Override
protected BLangNode transformSyntaxNode(Node node) {
    // Fallback for syntax kinds with no dedicated transform overload.
    String nodeType = node.getClass().getSimpleName();
    throw new RuntimeException("Node not supported: " + nodeType);
}
@Override
public BLangNode transform(ServiceDeclarationNode serviceDeclarationNode) {
    // Desugars a service declaration into:
    //   1. an anonymous service class holding the declared members,
    //   2. a synthesized variable initialized with `new <anon-class>()`, and
    //   3. a BLangService tying class, variable, attach-point expressions and
    //      absolute resource path together.
    Location pos = getPositionWithoutMetadata(serviceDeclarationNode);
    BLangClassDefinition anonClassDef = transformObjectCtorExpressionBody(serviceDeclarationNode.members());
    anonClassDef.isServiceDecl = true;
    anonClassDef.pos = pos;
    anonClassDef.flagSet.add(SERVICE);
    setClassQualifiers(serviceDeclarationNode.qualifiers(), anonClassDef);

    // Absolute resource path: either a single string literal (the service name) or a
    // sequence of identifier segments; "/" separators are dropped except for a lone
    // root "/" which is kept as a segment.
    List<IdentifierNode> absResourcePathPath = new ArrayList<>();
    NodeList<Node> pathList = serviceDeclarationNode.absoluteResourcePath();
    BLangLiteral serviceNameLiteral = null;
    if (pathList.size() == 1 && pathList.get(0).kind() == SyntaxKind.STRING_LITERAL) {
        serviceNameLiteral = (BLangLiteral) createExpression(pathList.get(0));
    } else {
        for (var token : pathList) {
            String text = ((Token) token).text();
            if (pathList.size() == 1 && text.equals("/")) {
                absResourcePathPath.add(createIdentifier((Token) token));
            } else if (!text.equals("/")) {
                absResourcePathPath.add(createIdentifier((Token) token));
            }
        }
    }

    // Name the anonymous class and register it at module level.
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
    anonClassDef.setName(anonTypeGenName);
    anonClassDef.flagSet.add(Flag.PUBLIC);

    // Optional `service <Type> ...` type descriptor becomes a type ref of the class.
    Optional<TypeDescriptorNode> typeReference = serviceDeclarationNode.typeDescriptor();
    typeReference.ifPresent(typeReferenceNode -> {
        BLangType typeNode = createTypeNode(typeReferenceNode);
        anonClassDef.typeRefs.add(typeNode);
    });
    anonClassDef.annAttachments = applyAll(getAnnotations(serviceDeclarationNode.metadata()));
    anonClassDef.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(serviceDeclarationNode.metadata()));
    addToTop(anonClassDef);

    // Build the `new <anon-class>()` initializer for the synthesized service variable.
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClassDef.name);
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = pos;
    initNode.userDefinedType = userDefinedType;
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier pkgAlias = createIdentifier(pos, "");
    BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;
    BLangSimpleVariable serviceVariable = createServiceVariable(pos, anonClassDef, initNode);

    // Attach-point expressions (the listeners the service is attached to).
    List<BLangExpression> exprs = new ArrayList<>();
    for (var exp : serviceDeclarationNode.expressions()) {
        exprs.add(createExpression(exp));
    }
    BLangService service = (BLangService) TreeBuilder.createServiceNode();
    service.serviceVariable = serviceVariable;
    service.attachedExprs = exprs;
    service.serviceClass = anonClassDef;
    service.absoluteResourcePath = absResourcePathPath;
    service.serviceNameLiteral = serviceNameLiteral;
    service.annAttachments = anonClassDef.annAttachments;
    service.pos = pos;
    service.name = createIdentifier(pos, anonymousModelHelper.getNextAnonymousServiceVarKey(packageID));
    return service;
}
// Synthesizes the internal variable `<anon-type> $anonServiceVarN = <initNode>` that
// holds the service instance for a desugared service declaration.
private BLangSimpleVariable createServiceVariable(Location pos, BLangClassDefinition anonClassDef,
                                                  BLangTypeInit initNode) {
    BLangSimpleVariable serviceVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    serviceVar.typeNode = createUserDefinedType(pos,
            (BLangIdentifier) TreeBuilder.createIdentifierNode(), anonClassDef.name);
    serviceVar.name = createIdentifier(pos, anonymousModelHelper.getNextAnonymousServiceVarKey(packageID));
    serviceVar.expr = initNode;
    serviceVar.internal = true;
    return serviceVar;
}
@Override
public BLangNode transform(ClassDefinitionNode classDefinitionNode) {
    // Transforms a class definition: metadata, visibility and qualifiers first, then
    // each member is visited and sorted into functions, fields or type references.
    BLangClassDefinition blangClass = (BLangClassDefinition) TreeBuilder.createClassDefNode();
    blangClass.pos = getPositionWithoutMetadata(classDefinitionNode);
    blangClass.annAttachments = applyAll(getAnnotations(classDefinitionNode.metadata()));
    BLangIdentifier identifierNode = createIdentifier(classDefinitionNode.className());
    blangClass.setName(identifierNode);
    blangClass.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(classDefinitionNode.metadata()));
    classDefinitionNode.visibilityQualifier().ifPresent(visibilityQual -> {
        if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            blangClass.flagSet.add(Flag.PUBLIC);
        }
    });
    setClassQualifiers(classDefinitionNode.classTypeQualifiers(), blangClass);

    NodeList<Node> members = classDefinitionNode.members();
    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        if (bLangNode.getKind() == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                // The first user-defined init becomes the class's init function;
                // any further init is kept as an ordinary attached function.
                if (blangClass.initFunction == null) {
                    bLangFunction.objInitFunction = true;
                    blangClass.initFunction = bLangFunction;
                } else {
                    blangClass.addFunction(bLangFunction);
                }
            } else {
                blangClass.addFunction(bLangFunction);
            }
        } else if (bLangNode.getKind() == NodeKind.VARIABLE) {
            blangClass.addField((BLangSimpleVariable) bLangNode);
        } else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            // `*TypeRef;` inclusion.
            blangClass.addTypeReference((BLangType) bLangNode);
        }
        // Members producing any other node kind are dropped here.
    }
    return blangClass;
}
@Override
public BLangNode transform(RetryStatementNode retryStatementNode) {
    BLangRetrySpec retrySpec = createRetrySpec(retryStatementNode);
    Location pos = getPosition(retryStatementNode);
    StatementNode body = retryStatementNode.retryBody();

    // `retry transaction { ... }` gets its own node type.
    if (body.kind() == SyntaxKind.TRANSACTION_STATEMENT) {
        BLangRetryTransaction retryTransaction =
                (BLangRetryTransaction) TreeBuilder.createRetryTransactionNode();
        retryTransaction.pos = pos;
        retryTransaction.setRetrySpec(retrySpec);
        retryTransaction.setTransaction((BLangTransaction) body.apply(this));
        return retryTransaction;
    }

    // Plain `retry { ... }` with an optional on-fail clause.
    BLangRetry retry = (BLangRetry) TreeBuilder.createRetryNode();
    retry.pos = pos;
    retry.setRetrySpec(retrySpec);
    retry.setRetryBody((BLangBlockStmt) body.apply(this));
    retryStatementNode.onFailClause().ifPresent(onFail -> retry.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFail.apply(this)));
    return retry;
}
private BLangRetrySpec createRetrySpec(RetryStatementNode retryStatementNode) {
    BLangRetrySpec retrySpec = (BLangRetrySpec) TreeBuilder.createRetrySpecNode();
    // `retry<Manager>` — optional retry-manager type parameter.
    retryStatementNode.typeParameter().ifPresent(typeParam -> {
        retrySpec.retryManagerType = createTypeNode(typeParam.typeNode());
        retrySpec.pos = getPosition(typeParam);
    });
    // `retry(args...)` — optional constructor arguments for the retry manager.
    // When present, the spec's position moves to the argument list.
    retryStatementNode.arguments().ifPresent(argList -> {
        retrySpec.pos = getPosition(argList);
        for (Node argNode : argList.arguments()) {
            retrySpec.argExprs.add(createExpression(argNode));
        }
    });
    // Fall back to the whole statement's position when neither part supplied one.
    if (retrySpec.pos == null) {
        retrySpec.pos = getPosition(retryStatementNode);
    }
    return retrySpec;
}
@Override
public BLangNode transform(TransactionalExpressionNode transactionalExpressionNode) {
    // The niladic `transactional` expression.
    BLangTransactionalExpr transactionalExpr = TreeBuilder.createTransactionalExpressionNode();
    transactionalExpr.pos = getPosition(transactionalExpressionNode);
    return transactionalExpr;
}
@Override
public BLangNode transform(XMLFilterExpressionNode xmlFilterExpressionNode) {
    // `x.<name-pattern>` — element access filtered by the listed name patterns.
    List<BLangXMLElementFilter> elementFilters = new ArrayList<>();
    for (Node namePattern : xmlFilterExpressionNode.xmlPatternChain().xmlNamePattern()) {
        elementFilters.add(createXMLElementFilter(namePattern));
    }
    BLangExpression subject = createExpression(xmlFilterExpressionNode.expression());
    return new BLangXMLElementAccess(getPosition(xmlFilterExpressionNode), null, subject, elementFilters);
}
@Override
public BLangNode transform(XMLStepExpressionNode xmlStepExpressionNode) {
    // Transforms an xml step expression into a navigation-access node; starCount
    // selects the NavAccessType variant via fromInt (0, 1 or 2 here).
    List<BLangXMLElementFilter> filters = new ArrayList<>();
    int starCount = 0;
    if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.SLASH_ASTERISK_TOKEN) {
        // `x/*`
        starCount = 1;
    } else if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.XML_NAME_PATTERN_CHAIN) {
        XMLNamePatternChainingNode xmlNamePatternChainingNode =
                (XMLNamePatternChainingNode) xmlStepExpressionNode.xmlStepStart();
        for (Node node : xmlNamePatternChainingNode.xmlNamePattern()) {
            filters.add(createXMLElementFilter(node));
        }
        switch (xmlNamePatternChainingNode.startToken().kind()) {
            case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
                // `x/**/<pattern>`
                starCount = 2;
                break;
            case SLASH_ASTERISK_TOKEN:
                // `x/*.<pattern>` style start
                starCount = 1;
                break;
            // Any other start token deliberately leaves starCount at 0.
        }
    }
    BLangExpression expr = createExpression(xmlStepExpressionNode.expression());
    BLangXMLNavigationAccess xmlNavigationAccess =
            new BLangXMLNavigationAccess(getPosition(xmlStepExpressionNode), null, expr, filters,
                    XMLNavigationAccess.NavAccessType.fromInt(starCount), null);
    return xmlNavigationAccess;
}
@Override
public BLangNode transform(MatchStatementNode matchStatementNode) {
    BLangMatchStatement matchStatement = (BLangMatchStatement) TreeBuilder.createMatchStatementNode();
    BLangExpression matchStmtExpr = createExpression(matchStatementNode.condition());
    matchStatement.setExpression(matchStmtExpr);

    for (MatchClauseNode matchClauseNode : matchStatementNode.matchClauses()) {
        BLangMatchClause bLangMatchClause = (BLangMatchClause) TreeBuilder.createMatchClause();
        bLangMatchClause.pos = getPosition(matchClauseNode);
        // Every clause keeps a reference to the matched expression.
        bLangMatchClause.expr = matchStmtExpr;
        boolean matchGuardAvailable = false;
        if (matchClauseNode.matchGuard().isPresent()) {
            matchGuardAvailable = true;
            BLangMatchGuard bLangMatchGuard = (BLangMatchGuard) TreeBuilder.createMatchGuard();
            bLangMatchGuard.expr = createExpression(matchClauseNode.matchGuard().get().expression());
            bLangMatchGuard.pos = getPosition(matchClauseNode.matchGuard().get());
            bLangMatchClause.setMatchGuard(bLangMatchGuard);
        }
        // A clause may list several alternative patterns (`p1|p2 => ...`); each
        // pattern records the matched expression and whether a guard exists.
        for (Node matchPattern : matchClauseNode.matchPatterns()) {
            BLangMatchPattern bLangMatchPattern = transformMatchPattern(matchPattern);
            if (bLangMatchPattern != null) {
                bLangMatchPattern.matchExpr = matchStmtExpr;
                bLangMatchPattern.matchGuardIsAvailable = matchGuardAvailable;
                bLangMatchClause.addMatchPattern(bLangMatchPattern);
            }
        }
        bLangMatchClause.setBlockStatement((BLangBlockStmt) transform(matchClauseNode.blockStatement()));
        matchStatement.addMatchClause(bLangMatchClause);
    }
    matchStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
        matchStatement.setOnFailClause(
                (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
    });
    matchStatement.pos = getPosition(matchStatementNode);
    return matchStatement;
}
private BLangXMLSequenceLiteral createXmlSequence(TemplateExpressionNode expressionNode) {
    // Builds an xml sequence literal from template content, merging runs of adjacent
    // text/interpolation children into a single text literal so that mixed content
    // yields one item per logical piece rather than one per token.
    BLangXMLSequenceLiteral xmlSequenceLiteral = (BLangXMLSequenceLiteral)
            TreeBuilder.createXMLSequenceLiteralNode();
    xmlSequenceLiteral.pos = getPosition(expressionNode);
    Node lastNode = null;  // previously visited child, if any
    List<Node> adjacentTextNodes = new ArrayList<>();  // current run of text/interpolation children
    int xmlContentSize = expressionNode.content().size();
    for (int index = 0; index < xmlContentSize; index++) {
        Node childItem = expressionNode.content().get(index);
        if (childItem.kind() == SyntaxKind.XML_TEXT || childItem.kind() == SyntaxKind.INTERPOLATION) {
            adjacentTextNodes.add(childItem);
            lastNode = childItem;
            // Keep accumulating unless this is the final child, in which case fall
            // through so the pending run below gets flushed.
            if (index != xmlContentSize - 1) {
                continue;
            }
        }
        // Flush the pending text run, if the previous child belonged to one.
        if (lastNode != null && (lastNode.kind() == SyntaxKind.XML_TEXT ||
                lastNode.kind() == SyntaxKind.INTERPOLATION)) {
            if (adjacentTextNodes.size() > 1) {
                xmlSequenceLiteral.xmlItems.add((BLangExpression) createXMLTextLiteral(adjacentTextNodes));
            } else {
                xmlSequenceLiteral.xmlItems.add(createXmlSingletonItem(lastNode));
            }
            adjacentTextNodes.clear();
            // If the current child is of the same kind as the flushed run's last node,
            // it was already part of that run — don't add it again below.
            if (lastNode.kind() == childItem.kind()) {
                continue;
            }
        }
        // Non-text child (element, comment, PI, ...) becomes its own sequence item.
        xmlSequenceLiteral.xmlItems.add(createXmlSingletonItem(childItem));
        lastNode = childItem;
    }
    return xmlSequenceLiteral;
}
public BLangExpression createXmlSingletonItem(Node xmlTypeNode) {
    // Structured XML items go through the generic expression builder;
    // anything else is treated as XML text.
    SyntaxKind kind = xmlTypeNode.kind();
    if (kind == SyntaxKind.XML_COMMENT || kind == SyntaxKind.XML_PI
            || kind == SyntaxKind.XML_ELEMENT || kind == SyntaxKind.XML_EMPTY_ELEMENT) {
        return createExpression(xmlTypeNode);
    }
    return (BLangExpression) createXMLTextLiteral(xmlTypeNode);
}
public BLangNode createXmlTemplateLiteral(TemplateExpressionNode expressionNode) {
    // Dispatch on content arity: empty template, a single item, or a sequence.
    int contentSize = expressionNode.content().size();
    if (contentSize == 0) {
        return createXMLEmptyLiteral(expressionNode);
    }
    if (contentSize == 1) {
        return createXmlSingletonItem(expressionNode.content().get(0));
    }
    return createXmlSequence(expressionNode);
}
private BLangMatchPattern transformMatchPattern(Node matchPattern) {
    // Dispatches a syntax-level match pattern to the corresponding BLang pattern
    // node. Kinds without a dedicated branch fall through to a constant pattern;
    // the assert below documents which kinds are expected there.
    Location matchPatternPos = matchPattern.location();
    SyntaxKind kind = matchPattern.kind();

    // `_` — wildcard, written either as a name reference or a bare identifier token.
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
            ((SimpleNameReferenceNode) matchPattern).name().text().equals("_")) {
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }
    if (kind == SyntaxKind.IDENTIFIER_TOKEN && ((IdentifierToken) matchPattern).text().equals("_")) {
        BLangWildCardMatchPattern bLangWildCardMatchPattern =
                (BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
        bLangWildCardMatchPattern.pos = matchPatternPos;
        return bLangWildCardMatchPattern;
    }
    // `var x` / `var [a, b]` etc. — binding pattern wrapped in a var-binding match pattern.
    if (kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        TypedBindingPatternNode typedBindingPatternNode = (TypedBindingPatternNode) matchPattern;
        BLangVarBindingPatternMatchPattern bLangVarBindingPattern =
                (BLangVarBindingPatternMatchPattern) TreeBuilder.createVarBindingPattern();
        bLangVarBindingPattern.pos = matchPatternPos;
        bLangVarBindingPattern.setBindingPattern(transformBindingPattern(typedBindingPatternNode.bindingPattern()));
        return bLangVarBindingPattern;
    }
    if (kind == SyntaxKind.ERROR_MATCH_PATTERN) {
        return transformErrorMatchPattern((ErrorMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN) {
        return transformNamedArgMatchPattern((NamedArgMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.LIST_MATCH_PATTERN) {
        return transformListMatchPattern((ListMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.REST_MATCH_PATTERN) {
        return transformRestMatchPattern((RestMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.MAPPING_MATCH_PATTERN) {
        return transformMappingMatchPattern((MappingMatchPatternNode) matchPattern, matchPatternPos);
    }
    if (kind == SyntaxKind.FIELD_MATCH_PATTERN) {
        return transformFieldMatchPattern((FieldMatchPatternNode) matchPattern, matchPatternPos);
    }
    // Everything else must be a constant expression usable as a const pattern.
    assert (kind == SyntaxKind.NUMERIC_LITERAL ||
            kind == SyntaxKind.STRING_LITERAL ||
            kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            kind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
            kind == SyntaxKind.IDENTIFIER_TOKEN ||
            kind == SyntaxKind.NULL_LITERAL ||
            kind == SyntaxKind.NIL_LITERAL ||
            kind == SyntaxKind.BOOLEAN_LITERAL ||
            kind == SyntaxKind.UNARY_EXPRESSION);
    BLangConstPattern bLangConstMatchPattern = (BLangConstPattern) TreeBuilder.createConstMatchPattern();
    bLangConstMatchPattern.setExpression(createExpression(matchPattern));
    bLangConstMatchPattern.pos = matchPatternPos;
    return bLangConstMatchPattern;
}
private BLangErrorMatchPattern transformErrorMatchPattern(ErrorMatchPatternNode errorMatchPatternNode,
                                                          Location pos) {
    // error-match-pattern := error [TypeRef] ( [message [, cause] [, field...]] )
    // Positional protocol: arg 0 is the message, arg 1 the cause; as soon as a field
    // pattern (named-arg / rest) appears, it and all remaining args are error fields.
    BLangErrorMatchPattern bLangErrorMatchPattern =
            (BLangErrorMatchPattern) TreeBuilder.createErrorMatchPattern();
    bLangErrorMatchPattern.pos = pos;

    NameReferenceNode nameReferenceNode;
    if (errorMatchPatternNode.typeReference().isPresent()) {
        nameReferenceNode = errorMatchPatternNode.typeReference().get();
        bLangErrorMatchPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode);
    }

    if (errorMatchPatternNode.argListMatchPatternNode().size() == 0) {
        return bLangErrorMatchPattern;
    }

    // Arg 0: either the message pattern or already the first field pattern.
    Node node = errorMatchPatternNode.argListMatchPatternNode().get(0);
    if (isErrorFieldMatchPattern(node)) {
        createErrorFieldMatchPatterns(0, errorMatchPatternNode, bLangErrorMatchPattern);
        return bLangErrorMatchPattern;
    }

    bLangErrorMatchPattern.errorMessageMatchPattern = createErrorMessageMatchPattern(node);
    if (errorMatchPatternNode.argListMatchPatternNode().size() == 1) {
        return bLangErrorMatchPattern;
    }

    // Arg 1: either the cause pattern or the first field pattern.
    node = errorMatchPatternNode.argListMatchPatternNode().get(1);
    if (isErrorFieldMatchPattern(node)) {
        createErrorFieldMatchPatterns(1, errorMatchPatternNode, bLangErrorMatchPattern);
        return bLangErrorMatchPattern;
    }

    bLangErrorMatchPattern.errorCauseMatchPattern = createErrorCauseMatchPattern(node);
    // Args 2..n: error field patterns.
    createErrorFieldMatchPatterns(2, errorMatchPatternNode, bLangErrorMatchPattern);
    return bLangErrorMatchPattern;
}
private BLangNamedArgMatchPattern transformNamedArgMatchPattern(NamedArgMatchPatternNode namedArgMatchPatternNode,
                                                                Location pos) {
    // `name = pattern` inside an error match pattern's argument list.
    BLangNamedArgMatchPattern namedArgPattern =
            (BLangNamedArgMatchPattern) TreeBuilder.createNamedArgMatchPattern();
    namedArgPattern.argName = createIdentifier(namedArgMatchPatternNode.identifier());
    namedArgPattern.matchPattern = transformMatchPattern(namedArgMatchPatternNode.matchPattern());
    namedArgPattern.pos = pos;
    return namedArgPattern;
}
/**
 * Transforms a list match pattern {@code [p1, p2, ...rest]}. A trailing rest
 * pattern is stored separately from the ordinary member patterns.
 */
private BLangListMatchPattern transformListMatchPattern(ListMatchPatternNode listMatchPatternNode,
                                                        Location pos) {
    BLangListMatchPattern bLangListMatchPattern =
            (BLangListMatchPattern) TreeBuilder.createListMatchPattern();
    bLangListMatchPattern.pos = pos;
    SeparatedNodeList<Node> matchPatterns = listMatchPatternNode.matchPatterns();
    int matchPatternListSize = matchPatterns.size();
    if (matchPatternListSize == 0) {
        return bLangListMatchPattern;
    }
    // All but the last member are ordinary member patterns.
    for (int i = 0; i < matchPatternListSize - 1; i++) {
        BLangMatchPattern bLangMemberMatchPattern = transformMatchPattern(matchPatterns.get(i));
        if (bLangMemberMatchPattern == null) {
            continue;
        }
        bLangListMatchPattern.addMatchPattern(bLangMemberMatchPattern);
    }
    // The last member may be the rest pattern (`...r`), stored separately.
    BLangMatchPattern lastMember = transformMatchPattern(matchPatterns.get(matchPatternListSize - 1));
    if (lastMember == null) {
        // Guard consistently with the loop above instead of dereferencing null.
        return bLangListMatchPattern;
    }
    if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
        bLangListMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
    } else {
        bLangListMatchPattern.addMatchPattern(lastMember);
    }
    return bLangListMatchPattern;
}
private BLangRestMatchPattern transformRestMatchPattern(RestMatchPatternNode restMatchPatternNode, Location pos) {
    // `...name` — captures the unmatched remainder of a list/mapping pattern.
    BLangRestMatchPattern restPattern = (BLangRestMatchPattern) TreeBuilder.createRestMatchPattern();
    restPattern.pos = pos;
    SimpleNameReferenceNode nameRef = restMatchPatternNode.variableName();
    restPattern.setIdentifier(createIdentifier(getPosition(nameRef), nameRef.name()));
    return restPattern;
}
private BLangMappingMatchPattern transformMappingMatchPattern(MappingMatchPatternNode mappingMatchPatternNode,
Location pos) {
BLangMappingMatchPattern bLangMappingMatchPattern =
(BLangMappingMatchPattern) TreeBuilder.createMappingMatchPattern();
bLangMappingMatchPattern.pos = pos;
SeparatedNodeList<Node> fieldMatchPatterns = mappingMatchPatternNode.fieldMatchPatterns();
int fieldMatchPatternListSize = fieldMatchPatterns.size();
if (fieldMatchPatternListSize == 0) {
return bLangMappingMatchPattern;
}
for (int i = 0; i < fieldMatchPatternListSize - 1; i++) {
bLangMappingMatchPattern.fieldMatchPatterns.add((BLangFieldMatchPattern)
transformMatchPattern(fieldMatchPatterns.get(i)));
}
BLangMatchPattern lastMember = transformMatchPattern(fieldMatchPatterns.get(fieldMatchPatternListSize - 1));
if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
bLangMappingMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
} else {
bLangMappingMatchPattern.addFieldMatchPattern((BLangFieldMatchPattern) lastMember);
}
return bLangMappingMatchPattern;
}
private BLangFieldMatchPattern transformFieldMatchPattern(FieldMatchPatternNode fieldMatchPatternNode,
Location pos) {
BLangFieldMatchPattern bLangFieldMatchPattern =
(BLangFieldMatchPattern) TreeBuilder.createFieldMatchPattern();
bLangFieldMatchPattern.pos = pos;
bLangFieldMatchPattern.fieldName =
createIdentifier(fieldMatchPatternNode.fieldNameNode());
bLangFieldMatchPattern.matchPattern = transformMatchPattern(fieldMatchPatternNode.matchPattern());
return bLangFieldMatchPattern;
}
    /**
     * Dispatches a binding-pattern syntax node to the transform method matching its kind.
     * Any kind not explicitly listed is expected to be a wildcard binding pattern; the
     * assert below documents that expectation.
     *
     * @param bindingPattern the binding pattern syntax node
     * @return the corresponding BLang binding pattern
     */
    private BLangBindingPattern transformBindingPattern(Node bindingPattern) {
        Location pos = getPosition(bindingPattern);
        SyntaxKind patternKind = bindingPattern.kind();
        switch (patternKind) {
            case CAPTURE_BINDING_PATTERN:
                return transformCaptureBindingPattern((CaptureBindingPatternNode) bindingPattern, pos);
            case LIST_BINDING_PATTERN:
                return transformListBindingPattern((ListBindingPatternNode) bindingPattern, pos);
            case NAMED_ARG_BINDING_PATTERN:
                return transformNamedArgBindingPattern((NamedArgBindingPatternNode) bindingPattern, pos);
            case REST_BINDING_PATTERN:
                return transformRestBindingPattern((RestBindingPatternNode) bindingPattern, pos);
            case MAPPING_BINDING_PATTERN:
                return transformMappingBindingPattern((MappingBindingPatternNode) bindingPattern, pos);
            case FIELD_BINDING_PATTERN:
                return transformFieldBindingPattern(bindingPattern, pos);
            case ERROR_BINDING_PATTERN:
                return transformErrorBindingPattern((ErrorBindingPatternNode) bindingPattern, pos);
            case WILDCARD_BINDING_PATTERN:
            default:
                // Deliberate fall-through: only WILDCARD_BINDING_PATTERN should reach here.
                assert patternKind == SyntaxKind.WILDCARD_BINDING_PATTERN;
                return transformWildCardBindingPattern(pos);
        }
    }
private BLangWildCardBindingPattern transformWildCardBindingPattern(Location pos) {
BLangWildCardBindingPattern bLangWildCardBindingPattern =
(BLangWildCardBindingPattern) TreeBuilder.createWildCardBindingPattern();
bLangWildCardBindingPattern.pos = pos;
return bLangWildCardBindingPattern;
}
private BLangCaptureBindingPattern transformCaptureBindingPattern(CaptureBindingPatternNode captureBindingPattern,
Location pos) {
BLangCaptureBindingPattern bLangCaptureBindingPattern =
(BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
bLangCaptureBindingPattern.setIdentifier(createIdentifier(captureBindingPattern.variableName()));
bLangCaptureBindingPattern.pos = pos;
return bLangCaptureBindingPattern;
}
private BLangRestBindingPattern transformRestBindingPattern(RestBindingPatternNode restBindingPatternNode,
Location pos) {
BLangRestBindingPattern bLangRestBindingPattern =
(BLangRestBindingPattern) TreeBuilder.createRestBindingPattern();
bLangRestBindingPattern.pos = pos;
SimpleNameReferenceNode variableName = restBindingPatternNode.variableName();
bLangRestBindingPattern.setIdentifier(createIdentifier(getPosition(variableName), variableName.name()));
return bLangRestBindingPattern;
}
private BLangListBindingPattern transformListBindingPattern(ListBindingPatternNode listBindingPatternNode,
Location pos) {
BLangListBindingPattern bLangListBindingPattern =
(BLangListBindingPattern) TreeBuilder.createListBindingPattern();
bLangListBindingPattern.pos = pos;
for (Node listMemberBindingPattern : listBindingPatternNode.bindingPatterns()) {
if (listMemberBindingPattern.kind() != SyntaxKind.REST_BINDING_PATTERN) {
bLangListBindingPattern.addBindingPattern(transformBindingPattern(listMemberBindingPattern));
continue;
}
bLangListBindingPattern.restBindingPattern =
(BLangRestBindingPattern) transformBindingPattern(listMemberBindingPattern);
}
return bLangListBindingPattern;
}
private BLangMappingBindingPattern transformMappingBindingPattern(MappingBindingPatternNode
mappingBindingPatternNode,
Location pos) {
BLangMappingBindingPattern bLangMappingBindingPattern =
(BLangMappingBindingPattern) TreeBuilder.createMappingBindingPattern();
bLangMappingBindingPattern.pos = pos;
for (Node fieldBindingPattern : mappingBindingPatternNode.fieldBindingPatterns()) {
if (fieldBindingPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
bLangMappingBindingPattern.restBindingPattern =
(BLangRestBindingPattern) transformBindingPattern(fieldBindingPattern);
continue;
}
bLangMappingBindingPattern.fieldBindingPatterns.add(
(BLangFieldBindingPattern) transformBindingPattern(fieldBindingPattern));
}
return bLangMappingBindingPattern;
}
    /**
     * Builds a {@link BLangFieldBindingPattern} from either form of field binding pattern:
     * the shorthand varname form ({@code {name}}), which synthesizes a capture binding pattern
     * reusing the field name, or the full form ({@code {name: pattern}}), which transforms the
     * nested binding pattern.
     *
     * @param bindingPattern a FieldBindingPatternVarnameNode or FieldBindingPatternFullNode
     * @param pos            position to attach to the new AST node
     * @return the populated field binding pattern
     */
    private BLangFieldBindingPattern transformFieldBindingPattern(Node bindingPattern, Location pos) {
        BLangFieldBindingPattern bLangFieldBindingPattern =
                (BLangFieldBindingPattern) TreeBuilder.createFieldBindingPattern();
        bLangFieldBindingPattern.pos = pos;
        if (bindingPattern instanceof FieldBindingPatternVarnameNode) {
            FieldBindingPatternVarnameNode fieldBindingPatternVarnameNode =
                    (FieldBindingPatternVarnameNode) bindingPattern;
            BLangIdentifier fieldName = createIdentifier(fieldBindingPatternVarnameNode.variableName().name());
            bLangFieldBindingPattern.fieldName = fieldName;
            // Shorthand form: the same identifier serves as both the field name and the
            // captured variable of the synthesized capture binding pattern.
            BLangCaptureBindingPattern bLangCaptureBindingPatternInFieldBindingPattern =
                    (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
            bLangCaptureBindingPatternInFieldBindingPattern.setIdentifier(fieldName);
            bLangCaptureBindingPatternInFieldBindingPattern.pos = pos;
            bLangFieldBindingPattern.bindingPattern = bLangCaptureBindingPatternInFieldBindingPattern;
            return bLangFieldBindingPattern;
        }
        // Full form: field name plus an explicit nested binding pattern.
        FieldBindingPatternFullNode fieldBindingPatternNode = (FieldBindingPatternFullNode) bindingPattern;
        bLangFieldBindingPattern.fieldName = createIdentifier(fieldBindingPatternNode.variableName().name());
        bLangFieldBindingPattern.bindingPattern =
                transformBindingPattern(fieldBindingPatternNode.bindingPattern());
        return bLangFieldBindingPattern;
    }
private BLangNamedArgBindingPattern transformNamedArgBindingPattern(NamedArgBindingPatternNode
namedArgBindingPattern,
Location pos) {
BLangNamedArgBindingPattern bLangNamedArgBindingPattern =
(BLangNamedArgBindingPattern) TreeBuilder.createNamedArgBindingPattern();
bLangNamedArgBindingPattern.pos = pos;
bLangNamedArgBindingPattern.argName = createIdentifier(namedArgBindingPattern.argName());
bLangNamedArgBindingPattern.bindingPattern =
transformBindingPattern(namedArgBindingPattern.bindingPattern());
return bLangNamedArgBindingPattern;
}
    /**
     * Builds a {@link BLangErrorBindingPattern} from an error binding pattern syntax node.
     * The arg list is positional: arg 0 is the error message (unless it is already a
     * named-arg/rest field pattern), arg 1 is the error cause (same caveat), and everything
     * from the first field pattern onwards becomes the error field binding patterns.
     *
     * @param errorBindingPatternNode source syntax node
     * @param pos                     position to attach to the new AST node
     * @return the populated error binding pattern
     */
    private BLangErrorBindingPattern transformErrorBindingPattern(ErrorBindingPatternNode errorBindingPatternNode,
                                                                  Location pos) {
        BLangErrorBindingPattern bLangErrorBindingPattern =
                (BLangErrorBindingPattern) TreeBuilder.createErrorBindingPattern();
        bLangErrorBindingPattern.pos = pos;
        if (errorBindingPatternNode.typeReference().isPresent()) {
            Node nameReferenceNode = errorBindingPatternNode.typeReference().get();
            bLangErrorBindingPattern.errorTypeReference =
                    (BLangUserDefinedType) createTypeNode(nameReferenceNode);
        }

        if (errorBindingPatternNode.argListBindingPatterns().size() == 0) {
            return bLangErrorBindingPattern;
        }

        // Arg 0: either the message binding pattern, or already a field pattern
        // (named-arg/rest), in which case fields start at index 0.
        Node node = errorBindingPatternNode.argListBindingPatterns().get(0);
        if (isErrorFieldBindingPattern(node)) {
            createErrorFieldBindingPatterns(0, errorBindingPatternNode, bLangErrorBindingPattern);
            return bLangErrorBindingPattern;
        }
        bLangErrorBindingPattern.errorMessageBindingPattern = createErrorMessageBindingPattern(node);
        if (errorBindingPatternNode.argListBindingPatterns().size() == 1) {
            return bLangErrorBindingPattern;
        }

        // Arg 1: either the cause binding pattern, or the first field pattern.
        node = errorBindingPatternNode.argListBindingPatterns().get(1);
        if (isErrorFieldBindingPattern(node)) {
            createErrorFieldBindingPatterns(1, errorBindingPatternNode, bLangErrorBindingPattern);
            return bLangErrorBindingPattern;
        }
        bLangErrorBindingPattern.errorCauseBindingPattern = createErrorCauseBindingPattern(node);

        // Remaining args (from index 2) are field binding patterns.
        createErrorFieldBindingPatterns(2, errorBindingPatternNode, bLangErrorBindingPattern);
        return bLangErrorBindingPattern;
    }
private boolean isErrorFieldMatchPattern(Node node) {
return node.kind() == SyntaxKind.NAMED_ARG_MATCH_PATTERN || node.kind() == SyntaxKind.REST_MATCH_PATTERN;
}
private boolean isErrorFieldBindingPattern(Node node) {
return node.kind() == SyntaxKind.NAMED_ARG_BINDING_PATTERN || node.kind() == SyntaxKind.REST_BINDING_PATTERN;
}
private BLangErrorMessageMatchPattern createErrorMessageMatchPattern(Node node) {
BLangMatchPattern matchPattern = transformMatchPattern(node);
BLangErrorMessageMatchPattern bLangErrorMessageMatchPattern =
(BLangErrorMessageMatchPattern) TreeBuilder.createErrorMessageMatchPattern();
bLangErrorMessageMatchPattern.pos = getPosition(node);
bLangErrorMessageMatchPattern.simpleMatchPattern = createSimpleMatchPattern(matchPattern);
return bLangErrorMessageMatchPattern;
}
private BLangErrorMessageBindingPattern createErrorMessageBindingPattern(Node node) {
BLangBindingPattern bindingPattern = transformBindingPattern(node);
BLangErrorMessageBindingPattern bLangErrorMessageBindingPattern =
(BLangErrorMessageBindingPattern) TreeBuilder.createErrorMessageBindingPattern();
bLangErrorMessageBindingPattern.pos = getPosition(node);
bLangErrorMessageBindingPattern.simpleBindingPattern = createSimpleBindingPattern(bindingPattern);
return bLangErrorMessageBindingPattern;
}
private BLangErrorCauseMatchPattern createErrorCauseMatchPattern(Node node) {
BLangMatchPattern matchPattern = transformMatchPattern(node);
BLangErrorCauseMatchPattern bLangErrorCauseMatchPattern =
(BLangErrorCauseMatchPattern) TreeBuilder.createErrorCauseMatchPattern();
bLangErrorCauseMatchPattern.pos = getPosition(node);
if (matchPattern.getKind() == NodeKind.ERROR_MATCH_PATTERN) {
bLangErrorCauseMatchPattern.errorMatchPattern = (BLangErrorMatchPattern) matchPattern;
return bLangErrorCauseMatchPattern;
}
bLangErrorCauseMatchPattern.simpleMatchPattern = createSimpleMatchPattern(matchPattern);
return bLangErrorCauseMatchPattern;
}
private BLangErrorCauseBindingPattern createErrorCauseBindingPattern(Node node) {
BLangBindingPattern bindingPattern = transformBindingPattern(node);
BLangErrorCauseBindingPattern bLangErrorCauseBindingPattern =
(BLangErrorCauseBindingPattern) TreeBuilder.createErrorCauseBindingPattern();
bLangErrorCauseBindingPattern.pos = getPosition(node);
if (bindingPattern.getKind() == NodeKind.ERROR_BINDING_PATTERN) {
bLangErrorCauseBindingPattern.errorBindingPattern = (BLangErrorBindingPattern) bindingPattern;
return bLangErrorCauseBindingPattern;
}
bLangErrorCauseBindingPattern.simpleBindingPattern = createSimpleBindingPattern(bindingPattern);
return bLangErrorCauseBindingPattern;
}
private BLangErrorFieldMatchPatterns createErrorFieldMatchPattern(Node errorFieldMatchPatternNode,
BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns) {
BLangMatchPattern matchPattern = transformMatchPattern(errorFieldMatchPatternNode);
bLangErrorFieldMatchPatterns.pos = getPosition(errorFieldMatchPatternNode);
if (matchPattern.getKind() == NodeKind.NAMED_ARG_MATCH_PATTERN) {
bLangErrorFieldMatchPatterns.addNamedArgMatchPattern(
(org.ballerinalang.model.tree.matchpatterns.NamedArgMatchPatternNode) matchPattern);
} else if (matchPattern.getKind() == NodeKind.REST_MATCH_PATTERN) {
bLangErrorFieldMatchPatterns.restMatchPattern = (BLangRestMatchPattern) matchPattern;
}
return bLangErrorFieldMatchPatterns;
}
private BLangErrorFieldBindingPatterns createErrorFieldBindingPattern(Node errorFieldBindingPatternNode,
BLangErrorFieldBindingPatterns
bLangErrorFieldBindingPatterns) {
BLangBindingPattern bindingPattern = transformBindingPattern(errorFieldBindingPatternNode);
bLangErrorFieldBindingPatterns.pos = getPosition(errorFieldBindingPatternNode);
if (bindingPattern.getKind() == NodeKind.NAMED_ARG_BINDING_PATTERN) {
bLangErrorFieldBindingPatterns.
addNamedArgBindingPattern(
(org.ballerinalang.model.tree.bindingpattern.NamedArgBindingPatternNode) bindingPattern);
} else if (bindingPattern.getKind() == NodeKind.REST_BINDING_PATTERN) {
bLangErrorFieldBindingPatterns.restBindingPattern = (BLangRestBindingPattern) bindingPattern;
}
return bLangErrorFieldBindingPatterns;
}
    /**
     * Accumulates all field-position match patterns of an error match pattern, starting at
     * {@code index}, into a single {@link BLangErrorFieldMatchPatterns} and attaches it to
     * the given error match pattern.
     *
     * @param index                first arg-list index to treat as a field pattern
     * @param errorMatchPatternNode source error match pattern syntax node
     * @param bLangErrorMatchPattern target AST node receiving the accumulated fields
     */
    private void createErrorFieldMatchPatterns(int index, ErrorMatchPatternNode errorMatchPatternNode,
                                               BLangErrorMatchPattern bLangErrorMatchPattern) {
        BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns =
                (BLangErrorFieldMatchPatterns) TreeBuilder.createErrorFieldMatchPattern();
        for (int i = index; i < errorMatchPatternNode.argListMatchPatternNode().size(); i++) {
            Node errorFieldMatchPatternNode = errorMatchPatternNode.argListMatchPatternNode().get(i);
            // The assignment stays inside the loop on purpose: when the range is empty
            // (index == size), errorFieldMatchPatterns is left unset (null).
            bLangErrorMatchPattern.errorFieldMatchPatterns = createErrorFieldMatchPattern(errorFieldMatchPatternNode,
                    bLangErrorFieldMatchPatterns);
        }
    }
    /**
     * Accumulates all field-position binding patterns of an error binding pattern, starting
     * at {@code index}, into a single {@link BLangErrorFieldBindingPatterns} and attaches it
     * to the given error binding pattern.
     *
     * @param index                   first arg-list index to treat as a field pattern
     * @param errorBindingPatternNode source error binding pattern syntax node
     * @param bLangErrorBindingPattern target AST node receiving the accumulated fields
     */
    private void createErrorFieldBindingPatterns(int index, ErrorBindingPatternNode errorBindingPatternNode,
                                                 BLangErrorBindingPattern bLangErrorBindingPattern) {
        BLangErrorFieldBindingPatterns bLangErrorFieldBindingPatterns =
                (BLangErrorFieldBindingPatterns) TreeBuilder.createErrorFieldBindingPattern();
        for (int i = index; i < errorBindingPatternNode.argListBindingPatterns().size(); i++) {
            Node errorFieldBindingPatternNode = errorBindingPatternNode.argListBindingPatterns().get(i);
            // The assignment stays inside the loop on purpose: when the range is empty
            // (index == size), errorFieldBindingPatterns is left unset (null).
            bLangErrorBindingPattern.errorFieldBindingPatterns =
                    createErrorFieldBindingPattern(errorFieldBindingPatternNode, bLangErrorFieldBindingPatterns);
        }
    }
private BLangSimpleMatchPattern createSimpleMatchPattern(BLangNode bLangNode) {
BLangSimpleMatchPattern bLangSimpleMatchPattern =
(BLangSimpleMatchPattern) TreeBuilder.createSimpleMatchPattern();
NodeKind kind = bLangNode.getKind();
switch (kind) {
case WILDCARD_MATCH_PATTERN:
bLangSimpleMatchPattern.wildCardMatchPattern = (BLangWildCardMatchPattern) bLangNode;
break;
case CONST_MATCH_PATTERN:
bLangSimpleMatchPattern.constPattern = (BLangConstPattern) bLangNode;
break;
case VAR_BINDING_PATTERN_MATCH_PATTERN:
bLangSimpleMatchPattern.varVariableName = (BLangVarBindingPatternMatchPattern) bLangNode;
break;
}
return bLangSimpleMatchPattern;
}
private BLangCaptureBindingPattern createCaptureBindingPattern(CaptureBindingPatternNode
captureBindingPatternNode) {
BLangCaptureBindingPattern bLangCaptureBindingPattern =
(BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
bLangCaptureBindingPattern.setIdentifier(createIdentifier(captureBindingPatternNode
.variableName()));
bLangCaptureBindingPattern.pos = getPosition(captureBindingPatternNode);
return bLangCaptureBindingPattern;
}
private BLangSimpleBindingPattern createSimpleBindingPattern(BLangNode bLangNode) {
BLangSimpleBindingPattern bLangSimpleBindingPattern =
(BLangSimpleBindingPattern) TreeBuilder.createSimpleBindingPattern();
NodeKind kind = bLangNode.getKind();
switch (kind) {
case WILDCARD_BINDING_PATTERN:
bLangSimpleBindingPattern.wildCardBindingPattern = (BLangWildCardBindingPattern) bLangNode;
break;
case CAPTURE_BINDING_PATTERN:
bLangSimpleBindingPattern.captureBindingPattern = (BLangCaptureBindingPattern) bLangNode;
break;
}
return bLangSimpleBindingPattern;
}
    /**
     * Builds a {@link BLangXMLElementFilter} for an XML step/name pattern. Depending on the
     * node kind, the filter carries an element name (and optionally a namespace prefix);
     * an asterisk token keeps the default wildcard name "*". Leading single quotes on
     * quoted identifiers are stripped.
     *
     * @param node a simple/qualified name reference, an XML atomic name pattern, or an asterisk
     * @return the populated element filter
     */
    private BLangXMLElementFilter createXMLElementFilter(Node node) {
        String ns = "";
        String elementName = "*";
        Location nsPos = null;
        Location elemNamePos = null;

        SyntaxKind kind = node.kind();
        switch (kind) {
            case SIMPLE_NAME_REFERENCE:
                // Unqualified element name: no namespace.
                SimpleNameReferenceNode simpleNameReferenceNode = (SimpleNameReferenceNode) node;
                elementName = simpleNameReferenceNode.name().text();
                elemNamePos = getPosition(simpleNameReferenceNode);
                break;
            case QUALIFIED_NAME_REFERENCE:
                // prefix:name form.
                QualifiedNameReferenceNode qualifiedNameReferenceNode = (QualifiedNameReferenceNode) node;
                elementName = qualifiedNameReferenceNode.identifier().text();
                elemNamePos = getPosition(qualifiedNameReferenceNode.identifier());
                ns = qualifiedNameReferenceNode.modulePrefix().text();
                nsPos = getPosition(qualifiedNameReferenceNode.modulePrefix());
                break;
            case XML_ATOMIC_NAME_PATTERN:
                XMLAtomicNamePatternNode atomicNamePatternNode = (XMLAtomicNamePatternNode) node;
                elementName = atomicNamePatternNode.name().text();
                elemNamePos = getPosition(atomicNamePatternNode.name());
                ns = atomicNamePatternNode.prefix().text();
                nsPos = getPosition(atomicNamePatternNode.prefix());
                break;
            case ASTERISK_TOKEN:
                // Wildcard: keep the default "*" name, record only the position.
                elemNamePos = getPosition(node);
        }

        // Quoted identifiers ('name) drop their leading quote character.
        if (stringStartsWithSingleQuote(ns)) {
            ns = ns.substring(1);
        }
        if (stringStartsWithSingleQuote(elementName)) {
            elementName = elementName.substring(1);
        }
        return new BLangXMLElementFilter(getPosition(node), null, ns, nsPos, elementName, elemNamePos);
    }
private boolean stringStartsWithSingleQuote(String ns) {
return ns != null && ns.length() > 0 && ns.charAt(0) == '\'';
}
private String getValueFromByteArrayNode(ByteArrayLiteralNode byteArrayLiteralNode) {
StringBuilder value = new StringBuilder();
value.append(byteArrayLiteralNode.type().text());
value.append(" ");
value.append("`");
if (byteArrayLiteralNode.content().isPresent()) {
value.append(byteArrayLiteralNode.content().get().text());
}
value.append("`");
return value.toString();
}
    /**
     * Builds a {@link BLangRecordVariable} (record destructuring variable) from a mapping
     * binding pattern. Full field patterns ({@code name: pattern}) and shorthand varname
     * patterns ({@code name}) become key/value entries; any other pattern is treated as the
     * rest parameter, which terminates processing since it must be last.
     *
     * @param mappingBindingPatternNode source syntax node
     * @return the populated record variable
     */
    private BLangRecordVariable createBLangRecordVariable(MappingBindingPatternNode mappingBindingPatternNode) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) TreeBuilder.createRecordVariableNode();
        List<BLangRecordVariableKeyValue> fieldBindingPatternsList = new ArrayList<>();

        for (BindingPatternNode node : mappingBindingPatternNode.fieldBindingPatterns()) {
            BLangRecordVariableKeyValue recordKeyValue = new BLangRecordVariableKeyValue();
            if (node instanceof FieldBindingPatternFullNode) {
                // name: pattern — key plus an arbitrary nested binding pattern.
                FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) node;
                recordKeyValue.key = createIdentifier(fullNode.variableName().name());
                recordKeyValue.valueBindingPattern = getBLangVariableNode(fullNode.bindingPattern());
            } else if (node instanceof FieldBindingPatternVarnameNode) {
                // Shorthand: the field name doubles as the bound variable's name.
                FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) node;
                recordKeyValue.key = createIdentifier(varnameNode.variableName().name());
                BLangSimpleVariable value = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
                value.pos = getPosition(varnameNode);
                IdentifierNode name = createIdentifier(varnameNode.variableName().name());
                ((BLangIdentifier) name).pos = value.pos;
                value.setName(name);
                recordKeyValue.valueBindingPattern = value;
            } else { // rest binding pattern — must be last, so stop here.
                recordVariable.restParam = getBLangVariableNode(node);
                break;
            }
            fieldBindingPatternsList.add(recordKeyValue);
        }
        recordVariable.variableList = fieldBindingPatternsList;
        recordVariable.pos = getPosition(mappingBindingPatternNode);
        return recordVariable;
    }
private BLangLiteral createEmptyLiteral() {
BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
bLiteral.value = "";
bLiteral.originalValue = "";
bLiteral.type = symTable.getTypeFromTag(TypeTags.STRING);
return bLiteral;
}
private BLangVariable createSimpleVariable(Location location,
Token identifier,
Location identifierPos) {
BLangSimpleVariable memberVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
memberVar.pos = location;
IdentifierNode name = createIdentifier(identifierPos, identifier);
((BLangIdentifier) name).pos = identifierPos;
memberVar.setName(name);
return memberVar;
}
    /**
     * Converts a binding pattern into the corresponding BLang variable node: mapping
     * patterns become record variables, list patterns tuple variables, error patterns
     * error variables, and rest/wildcard/capture patterns plain simple variables.
     *
     * @param bindingPattern the binding pattern syntax node
     * @return the corresponding BLang variable
     */
    private BLangVariable getBLangVariableNode(BindingPatternNode bindingPattern) {
        Token varName;
        switch (bindingPattern.kind()) {
            case MAPPING_BINDING_PATTERN:
                MappingBindingPatternNode mappingBindingPatternNode = (MappingBindingPatternNode) bindingPattern;
                return createBLangRecordVariable(mappingBindingPatternNode);
            case LIST_BINDING_PATTERN:
                // Members map to tuple member variables; a rest pattern becomes restVariable.
                ListBindingPatternNode listBindingPatternNode = (ListBindingPatternNode) bindingPattern;
                BLangTupleVariable tupleVariable = (BLangTupleVariable) TreeBuilder.createTupleVariableNode();
                tupleVariable.pos = getPosition(listBindingPatternNode);

                for (BindingPatternNode memberBindingPattern : listBindingPatternNode.bindingPatterns()) {
                    if (memberBindingPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
                        tupleVariable.restVariable = getBLangVariableNode(memberBindingPattern);
                    } else {
                        BLangVariable member = getBLangVariableNode(memberBindingPattern);
                        tupleVariable.memberVariables.add(member);
                    }
                }

                return tupleVariable;
            case ERROR_BINDING_PATTERN:
                ErrorBindingPatternNode errorBindingPatternNode = (ErrorBindingPatternNode) bindingPattern;
                BLangErrorVariable bLangErrorVariable = (BLangErrorVariable) TreeBuilder.createErrorVariableNode();
                bLangErrorVariable.pos = getPosition(errorBindingPatternNode);

                Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
                if (errorTypeRef.isPresent()) {
                    bLangErrorVariable.typeNode = createTypeNode(errorTypeRef.get());
                }

                SeparatedNodeList<BindingPatternNode> argListBindingPatterns =
                        errorBindingPatternNode.argListBindingPatterns();
                int numberOfArgs = argListBindingPatterns.size();
                List<BLangErrorVariable.BLangErrorDetailEntry> namedArgs = new ArrayList<>();
                // Positional parsing: arg 0 is the message; a capture/wildcard anywhere
                // else, or an error pattern, is the cause; named args are detail entries;
                // anything else is the rest detail.
                for (int position = 0; position < numberOfArgs; position++) {
                    BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
                    switch (bindingPatternNode.kind()) {
                        case CAPTURE_BINDING_PATTERN:
                        case WILDCARD_BINDING_PATTERN:
                            if (position == 0) {
                                bLangErrorVariable.message =
                                        (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                                break;
                            }
                            // Intentional fall-through: a capture/wildcard past position 0
                            // is treated as the error cause.
                        case ERROR_BINDING_PATTERN:
                            bLangErrorVariable.cause = getBLangVariableNode(bindingPatternNode);
                            break;
                        case NAMED_ARG_BINDING_PATTERN:
                            NamedArgBindingPatternNode namedArgBindingPatternNode =
                                    (NamedArgBindingPatternNode) bindingPatternNode;
                            BLangIdentifier key =
                                    createIdentifier(namedArgBindingPatternNode.argName());
                            BLangVariable valueBindingPattern =
                                    getBLangVariableNode(namedArgBindingPatternNode.bindingPattern());
                            BLangErrorVariable.BLangErrorDetailEntry detailEntry =
                                    new BLangErrorVariable.BLangErrorDetailEntry(key, valueBindingPattern);
                            namedArgs.add(detailEntry);
                            break;
                        default:// Rest binding pattern
                            bLangErrorVariable.restDetail =
                                    (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                    }
                }
                bLangErrorVariable.detail = namedArgs;
                return bLangErrorVariable;
            case REST_BINDING_PATTERN:
                RestBindingPatternNode restBindingPatternNode = (RestBindingPatternNode) bindingPattern;
                varName = restBindingPatternNode.variableName().name();
                break;
            case WILDCARD_BINDING_PATTERN:
                WildcardBindingPatternNode wildcardBindingPatternNode = (WildcardBindingPatternNode) bindingPattern;
                varName = wildcardBindingPatternNode.underscoreToken();
                break;
            case CAPTURE_BINDING_PATTERN:
            default:
                CaptureBindingPatternNode captureBindingPatternNode = (CaptureBindingPatternNode) bindingPattern;
                varName = captureBindingPatternNode.variableName();
                break;
        }

        // Rest/wildcard/capture all reduce to a simple variable named by the token above.
        Location pos = getPosition(bindingPattern);
        return createSimpleVariable(pos, varName, getPosition(varName));
    }
BLangValueType addValueType(Location pos, TypeKind typeKind) {
BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
typeNode.pos = pos;
typeNode.typeKind = typeKind;
return typeNode;
}
private List<BLangStatement> generateBLangStatements(NodeList<StatementNode> statementNodes) {
List<BLangStatement> statements = new ArrayList<>();
return generateAndAddBLangStatements(statementNodes, statements);
}
private List<BLangStatement> generateAndAddBLangStatements(NodeList<StatementNode> statementNodes,
List<BLangStatement> statements) {
for (StatementNode statement : statementNodes) {
if (statement != null) {
if (statement.kind() == SyntaxKind.FORK_STATEMENT) {
generateForkStatements(statements, (ForkStatementNode) statement);
continue;
}
statements.add((BLangStatement) statement.apply(this));
}
}
return statements;
}
private String extractVersion(SeparatedNodeList<Token> versionNumbers) {
StringBuilder version = new StringBuilder();
int size = versionNumbers.size();
for (int i = 0; i < size; i++) {
if (i != 0) {
version.append(".");
}
version.append(versionNumbers.get(i).text());
}
return version.toString();
}
    /**
     * Expands a fork statement into the output list: each named worker becomes a variable
     * definition flagged as a forked worker, any statements queued on
     * {@code additionalStatements} by the worker transform are drained right after it,
     * and the fork-join node itself is appended last.
     *
     * @param statements        list receiving the expanded statements
     * @param forkStatementNode the fork statement syntax node
     */
    private void generateForkStatements(List<BLangStatement> statements, ForkStatementNode forkStatementNode) {
        BLangForkJoin forkJoin = (BLangForkJoin) forkStatementNode.apply(this);
        String nextAnonymousForkKey = anonymousModelHelper.getNextAnonymousForkKey(packageID);
        for (NamedWorkerDeclarationNode workerDeclarationNode : forkStatementNode.namedWorkerDeclarations()) {
            BLangSimpleVariableDef workerDef = (BLangSimpleVariableDef) workerDeclarationNode.apply(this);
            workerDef.isWorker = true;
            workerDef.isInFork = true;
            workerDef.var.flagSet.add(Flag.FORKED);

            // All workers of one fork share the same anonymous fork name.
            BLangFunction function = ((BLangLambdaFunction) workerDef.var.expr).function;
            function.addFlag(Flag.FORKED);
            function.anonForkName = nextAnonymousForkKey;

            statements.add(workerDef);
            // Drain statements pushed as side effects of transforming this worker.
            while (!this.additionalStatements.empty()) {
                statements.add(additionalStatements.pop());
            }
            forkJoin.addWorkers(workerDef);
        }
        statements.add(forkJoin);
    }
private BLangCheckedExpr createCheckExpr(Location pos, BLangExpression expr) {
BLangCheckedExpr checkedExpr = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
checkedExpr.pos = pos;
checkedExpr.expr = expr;
return checkedExpr;
}
private BLangCheckPanickedExpr createCheckPanickedExpr(Location pos, BLangExpression expr) {
BLangCheckPanickedExpr checkPanickedExpr =
(BLangCheckPanickedExpr) TreeBuilder.createCheckPanicExpressionNode();
checkPanickedExpr.pos = pos;
checkPanickedExpr.expr = expr;
return checkPanickedExpr;
}
private void populateFuncSignature(BLangFunction bLFunction, FunctionSignatureNode funcSignature) {
for (ParameterNode child : funcSignature.parameters()) {
SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
if (child instanceof RestParameterNode) {
bLFunction.setRestParameter(param);
} else {
bLFunction.addParameter(param);
}
}
Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
if (retNode.isPresent()) {
ReturnTypeDescriptorNode returnType = retNode.get();
bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
bLFunction.returnTypeAnnAttachments = applyAll(returnType.annotations());
} else {
BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
bLValueType.pos = symTable.builtinPos;
bLValueType.typeKind = TypeKind.NIL;
bLFunction.setReturnTypeNode(bLValueType);
}
}
private BLangUnaryExpr createBLangUnaryExpr(Location location,
OperatorKind operatorKind,
BLangExpression expr) {
BLangUnaryExpr bLUnaryExpr = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
bLUnaryExpr.pos = location;
bLUnaryExpr.operator = operatorKind;
bLUnaryExpr.expr = expr;
return bLUnaryExpr;
}
    /**
     * Transforms a syntax node into an expression. An async send action is not valid in
     * expression position: an error is logged and the node is replaced by a reference to
     * a synthesized missing identifier so transformation can continue.
     *
     * @param expression the expression (or misplaced action) syntax node
     * @return the transformed expression
     */
    private BLangExpression createExpression(Node expression) {
        if (expression.kind() == SyntaxKind.ASYNC_SEND_ACTION) {
            // Error recovery: report and substitute a missing identifier reference.
            dlog.error(getPosition(expression), DiagnosticErrorCode.ASYNC_SEND_NOT_YET_SUPPORTED_AS_EXPRESSION);
            Token missingIdentifier = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                    NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
            expression = NodeFactory.createSimpleNameReferenceNode(missingIdentifier);
        }
        return (BLangExpression) createActionOrExpression(expression);
    }
    /**
     * Transforms an action-or-expression syntax node into the matching BLang node:
     * simple literals, name references (as variable references), braced expressions
     * (as group expressions), types in expression position (as typedesc access), and
     * everything else via the node's own transform.
     *
     * @param actionOrExpression the syntax node
     * @return the corresponding BLang node
     */
    private BLangNode createActionOrExpression(Node actionOrExpression) {
        if (isSimpleLiteral(actionOrExpression.kind())) {
            return createSimpleLiteral(actionOrExpression);
        } else if (actionOrExpression.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE ||
                actionOrExpression.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
                actionOrExpression.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
            // Variable reference: split into package alias and variable name.
            BLangNameReference nameReference = createBLangNameReference(actionOrExpression);
            BLangSimpleVarRef bLVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
            bLVarRef.pos = getPosition(actionOrExpression);
            bLVarRef.pkgAlias = this.createIdentifier((Location) nameReference.pkgAlias.getPosition(),
                                                      nameReference.pkgAlias.getValue());
            bLVarRef.variableName = this.createIdentifier((Location) nameReference.name.getPosition(),
                                                          nameReference.name.getValue());
            return bLVarRef;
        } else if (actionOrExpression.kind() == SyntaxKind.BRACED_EXPRESSION) {
            BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
            group.expression = (BLangExpression) actionOrExpression.apply(this);
            group.pos = getPosition(actionOrExpression);
            return group;
        } else if (isType(actionOrExpression.kind())) {
            // A type used in expression position becomes a typedesc access expression.
            BLangTypedescExpr typeAccessExpr = (BLangTypedescExpr) TreeBuilder.createTypeAccessNode();
            typeAccessExpr.pos = getPosition(actionOrExpression);
            typeAccessExpr.typeNode = createTypeNode(actionOrExpression);
            return typeAccessExpr;
        } else {
            return actionOrExpression.apply(this);
        }
    }
private BLangNode createStringTemplateLiteral(NodeList<Node> memberNodes, Location location) {
BLangStringTemplateLiteral stringTemplateLiteral =
(BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
for (Node memberNode : memberNodes) {
stringTemplateLiteral.exprs.add((BLangExpression) memberNode.apply(this));
}
if (stringTemplateLiteral.exprs.isEmpty()) {
BLangLiteral emptyLiteral = createEmptyLiteral();
emptyLiteral.pos = location;
stringTemplateLiteral.exprs.add(emptyLiteral);
}
stringTemplateLiteral.pos = location;
return stringTemplateLiteral;
}
    /**
     * Builds a {@link BLangRawTemplateLiteral}, maintaining the invariant that string
     * parts and insertions strictly alternate with string parts at both ends
     * (strings.size() == insertions.size() + 1). Empty string literals are synthesized
     * wherever two interpolations are adjacent or an interpolation starts/ends the template.
     *
     * @param members  template content nodes (string parts and interpolations)
     * @param location position for the new node
     * @return the raw template literal
     */
    private BLangRawTemplateLiteral createRawTemplateLiteral(NodeList<Node> members, Location location) {
        BLangRawTemplateLiteral literal = (BLangRawTemplateLiteral) TreeBuilder.createRawTemplateLiteralNode();
        literal.pos = location;

        boolean prevNodeWasInterpolation = false;
        Node firstMember = members.isEmpty() ? null : members.get(0);

        // Template starting with an interpolation needs a leading empty string part.
        if (firstMember != null && firstMember.kind() == SyntaxKind.INTERPOLATION) {
            literal.strings.add(createStringLiteral("", getPosition(firstMember)));
        }

        for (Node member : members) {
            if (member.kind() == SyntaxKind.INTERPOLATION) {
                literal.insertions.add((BLangExpression) member.apply(this));

                if (prevNodeWasInterpolation) {
                    // Adjacent interpolations get an empty string part between them.
                    literal.strings.add(createStringLiteral("", getPosition(member)));
                }

                prevNodeWasInterpolation = true;
            } else {
                literal.strings.add((BLangLiteral) member.apply(this));
                prevNodeWasInterpolation = false;
            }
        }

        // Template ending with an interpolation needs a trailing empty string part.
        if (prevNodeWasInterpolation) {
            literal.strings.add(createStringLiteral("", getPosition(members.get(members.size() - 1))));
        }

        return literal;
    }
private BLangSimpleVariable createSimpleVar(Optional<Token> name, Node type, NodeList<AnnotationNode> annotations) {
if (name.isPresent()) {
Token nameToken = name.get();
return createSimpleVar(nameToken, type, null, null, annotations);
}
return createSimpleVar(null, type, null, null, annotations);
}
    // Convenience overload: delegates to the full createSimpleVar with no initializer
    // and no visibility qualifier.
    private BLangSimpleVariable createSimpleVar(Token name, Node type, NodeList<AnnotationNode> annotations) {
        return createSimpleVar(name, type, null, null, annotations);
    }
private BLangSimpleVariable createSimpleVar(Token name, Node typeName, Node initializer,
Token visibilityQualifier, NodeList<AnnotationNode> annotations) {
BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
bLSimpleVar.setName(this.createIdentifier(name));
bLSimpleVar.name.pos = getPosition(name);
if (isDeclaredWithVar(typeName)) {
bLSimpleVar.isDeclaredWithVar = true;
} else {
bLSimpleVar.setTypeNode(createTypeNode(typeName));
}
if (visibilityQualifier != null) {
if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
bLSimpleVar.flagSet.add(Flag.PRIVATE);
} else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
bLSimpleVar.flagSet.add(Flag.PUBLIC);
}
}
if (initializer != null) {
bLSimpleVar.setInitialExpression(createExpression(initializer));
}
if (annotations != null) {
bLSimpleVar.annAttachments = applyAll(annotations);
}
return bLSimpleVar;
}
/**
 * Returns true when a variable is declared with {@code var}, i.e. the type descriptor
 * is absent (null) or is an explicit {@code var} type descriptor.
 */
private boolean isDeclaredWithVar(Node typeNode) {
    // Idiom fix: return the condition directly instead of if/return-true/return-false.
    return typeNode == null || typeNode.kind() == SyntaxKind.VAR_TYPE_DESC;
}
/** Creates an identifier node from a token, using the token's own position. */
private BLangIdentifier createIdentifier(Token token) {
    return createIdentifier(getPosition(token), token);
}
/**
 * Creates an identifier node from a token at the given position. A missing token
 * (recovered by the parser) gets a compiler-generated placeholder name.
 */
private BLangIdentifier createIdentifier(Location pos, Token token) {
    if (token == null) {
        return createIdentifier(pos, null, null);
    }
    String identifierName = token.isMissing()
            ? missingNodesHelper.getNextMissingNodeName(packageID)
            : token.text();
    return createIdentifier(pos, identifierName);
}
/** Creates an identifier node for the given value with no attached whitespace set. */
private BLangIdentifier createIdentifier(Location pos, String value) {
    return createIdentifier(pos, value, null);
}
/**
 * Creates an identifier node for the given value, unescaping unicode code points.
 * Values starting with the identifier-literal prefix (quoted identifiers) are stripped
 * of the prefix, keep their original escaped form, and are flagged as literals.
 */
private BLangIdentifier createIdentifier(Location pos, String value, Set<Whitespace> ws) {
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    if (value == null) {
        return identifier;
    }

    boolean isQuoted = value.startsWith(IDENTIFIER_LITERAL_PREFIX);
    if (isQuoted) {
        identifier.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value.substring(1)));
        // Keep the escaped, prefixed source spelling for quoted identifiers.
        identifier.originalValue = value;
    } else {
        identifier.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value));
    }
    identifier.setLiteral(isQuoted);
    identifier.pos = pos;
    if (ws != null) {
        identifier.addWS(ws);
    }
    return identifier;
}
/** Creates a string-typed literal node holding the empty string at the given position. */
private BLangLiteral createEmptyStringLiteral(Location pos) {
    BLangLiteral emptyLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    emptyLiteral.value = "";
    emptyLiteral.originalValue = "";
    emptyLiteral.type = symTable.stringType;
    emptyLiteral.pos = pos;
    return emptyLiteral;
}
/** Creates a literal node for a literal that is not a finite-type member. */
private BLangLiteral createSimpleLiteral(Node literal) {
    return createSimpleLiteral(literal, false);
}
/**
 * Creates a literal node, unwrapping a surrounding unary expression so that the
 * unary +/- sign is folded into the literal itself.
 */
private BLangLiteral createSimpleLiteral(Node literal, boolean isFiniteType) {
    if (literal.kind() != SyntaxKind.UNARY_EXPRESSION) {
        return createSimpleLiteral(literal, SyntaxKind.NONE, isFiniteType);
    }
    UnaryExpressionNode unaryExpr = (UnaryExpressionNode) literal;
    BLangLiteral signedLiteral =
            createSimpleLiteral(unaryExpr.expression(), unaryExpr.unaryOperator().kind(), isFiniteType);
    // The literal's position covers the whole unary expression, including the sign.
    signedLiteral.pos = getPosition(unaryExpr);
    return signedLiteral;
}
/**
 * Creates a BLang literal node from a literal syntax node, folding an optional unary sign
 * into the source text and computing the value, original text, and type tag for numeric,
 * boolean, string/template, nil/null, byte-array and binary-expression literals.
 *
 * @param literal      the literal node (a BasicLiteralNode, a Token, or another node kind)
 * @param sign         PLUS_TOKEN / MINUS_TOKEN when the literal was wrapped in a unary
 *                     expression, otherwise NONE
 * @param isFiniteType true when the literal appears as a finite-type member (affects how
 *                     float suffixes and '+' signs are normalized)
 */
private BLangLiteral createSimpleLiteral(Node literal, SyntaxKind sign, boolean isFiniteType) {
    BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    SyntaxKind type = literal.kind();
    int typeTag = -1;
    Object value = null;
    String originalValue = null;

    // Extract the raw source text of the literal; non-token nodes contribute "".
    String textValue;
    if (literal instanceof BasicLiteralNode) {
        textValue = ((BasicLiteralNode) literal).literalToken().text();
    } else if (literal instanceof Token) {
        textValue = ((Token) literal).text();
    } else {
        textValue = "";
    }

    // Fold the unary sign into the literal's text so parsing sees a signed value.
    if (sign == SyntaxKind.PLUS_TOKEN) {
        textValue = "+" + textValue;
    } else if (sign == SyntaxKind.MINUS_TOKEN) {
        textValue = "-" + textValue;
    }

    if (type == SyntaxKind.NUMERIC_LITERAL) {
        SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind();
        if (literalTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
            typeTag = TypeTags.INT;
            value = getIntegerLiteral(literal, textValue, sign);
            originalValue = textValue;
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            // Hex integers in [0, 255] are narrowed to byte.
            if (literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN && withinByteRange(value)) {
                typeTag = TypeTags.BYTE;
            }
        } else if (literalTokenKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN) {
            // A 'd' suffix discriminates decimal; otherwise the literal is float.
            typeTag = NumericLiteralSupport.isDecimalDiscriminated(textValue) ? TypeTags.DECIMAL : TypeTags.FLOAT;
            if (isFiniteType) {
                // Finite-type members drop f/d suffixes and '+' signs from the value.
                value = textValue.replaceAll("[fd+]", "");
                originalValue = textValue.replace("+", "");
            } else {
                value = textValue;
                originalValue = textValue;
            }
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        } else if (literalTokenKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN) {
            typeTag = TypeTags.FLOAT;
            value = getHexNodeValue(textValue);
            originalValue = textValue;
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        }
    } else if (type == SyntaxKind.BOOLEAN_LITERAL) {
        typeTag = TypeTags.BOOLEAN;
        value = Boolean.parseBoolean(textValue);
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.STRING_LITERAL || type == SyntaxKind.XML_TEXT_CONTENT ||
            type == SyntaxKind.TEMPLATE_STRING || type == SyntaxKind.IDENTIFIER_TOKEN) {
        String text = textValue;
        if (type == SyntaxKind.STRING_LITERAL) {
            // Strip surrounding quotes; an unterminated literal only has the opening quote.
            if (text.length() > 1 && text.charAt(text.length() - 1) == '"') {
                text = text.substring(1, text.length() - 1);
            } else {
                text = text.substring(1);
            }
        }
        String originalText = text;
        Location pos = getPosition(literal);

        // Validate and normalize \u{...} unicode escapes to \uXXXX form.
        Matcher matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
        int position = 0;
        while (matcher.find(position)) {
            String hexStringVal = matcher.group(1);
            int hexDecimalVal = Integer.parseInt(hexStringVal, 16);
            // Code points in the surrogate range or above the unicode maximum are invalid.
            if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE)
                    || hexDecimalVal > Constants.MAX_UNICODE) {
                String hexStringWithBraces = matcher.group(0);
                int offset = originalText.indexOf(hexStringWithBraces) + 1;
                dlog.error(new BLangDiagnosticLocation(currentCompUnitName,
                                pos.lineRange().startLine().line(),
                                pos.lineRange().endLine().line(),
                                pos.lineRange().startLine().offset() + offset,
                                pos.lineRange().startLine().offset() + offset + hexStringWithBraces.length()),
                        DiagnosticErrorCode.INVALID_UNICODE, hexStringWithBraces);
            }
            text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal));
            // Rewind by 2 before re-scanning the rewritten text — presumably to cover
            // escapes adjacent to the replacement; TODO confirm.
            position = matcher.end() - 2;
            matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
        }

        // Template strings and XML text are kept raw; everything else is Java-unescaped.
        if (type != SyntaxKind.TEMPLATE_STRING && type != SyntaxKind.XML_TEXT_CONTENT) {
            try {
                text = StringEscapeUtils.unescapeJava(text);
            } catch (Exception e) {
                dlog.error(pos, DiagnosticErrorCode.INVALID_UNICODE, originalText);
            }
        }

        typeTag = TypeTags.STRING;
        value = text;
        originalValue = textValue;
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.NIL_LITERAL) {
        originalValue = "()";
        typeTag = TypeTags.NIL;
        value = "()";
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.NULL_LITERAL) {
        originalValue = "null";
        typeTag = TypeTags.NIL;
        value = "null";
        bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    } else if (type == SyntaxKind.BINARY_EXPRESSION) { // Should be base16 and base64
        typeTag = TypeTags.BYTE_ARRAY;
        value = textValue;
        originalValue = textValue;

        // NOTE: type is BINARY_EXPRESSION here, so isNumericLiteral(type) is always false
        // and the else branch is taken; kept as-is for fidelity.
        if (isNumericLiteral(type)) {
            bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
        } else {
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        }
    } else if (type == SyntaxKind.BYTE_ARRAY_LITERAL) {
        // Byte-array literals have their own transformer.
        return (BLangLiteral) literal.apply(this);
    }

    bLiteral.pos = getPosition(literal);
    bLiteral.type = symTable.getTypeFromTag(typeTag);
    bLiteral.type.tag = typeTag;
    bLiteral.value = value;
    bLiteral.originalValue = originalValue;
    return bLiteral;
}
/** Creates a string literal node whose value and original value are both {@code value}. */
private BLangLiteral createStringLiteral(String value, Location pos) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.value = value;
    literal.originalValue = value;
    literal.type = symTable.stringType;
    literal.pos = pos;
    return literal;
}
/**
 * Converts a syntax-tree type node into the corresponding BLang type node: built-in types,
 * qualified/simple name references, or — for any other kind — the generic node transformer.
 */
private BLangType createTypeNode(Node type) {
    // Built-in simple types (and nil) are handled first, before name-reference kinds.
    if (type instanceof BuiltinSimpleNameReferenceNode || type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        return createBuiltInTypeNode(type);
    }

    SyntaxKind kind = type.kind();
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.IDENTIFIER_TOKEN) {
        BLangUserDefinedType userDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        BLangNameReference nameReference = createBLangNameReference(type);
        userDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
        userDefinedType.typeName = (BLangIdentifier) nameReference.name;
        userDefinedType.pos = getPosition(type);
        return userDefinedType;
    }

    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        SimpleNameReferenceNode nameReferenceNode = (SimpleNameReferenceNode) type;
        if (!type.hasDiagnostics()) {
            return createTypeNode(nameReferenceNode.name());
        }
        // Erroneous references are wrapped as user-defined types with an empty package alias.
        BLangUserDefinedType userDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
        BLangIdentifier pkgAlias = this.createIdentifier(null, "");
        BLangIdentifier name = this.createIdentifier(nameReferenceNode.name());
        BLangNameReference nameReference = new BLangNameReference(getPosition(type), null, pkgAlias, name);
        userDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
        userDefinedType.typeName = (BLangIdentifier) nameReference.name;
        userDefinedType.pos = getPosition(type);
        return userDefinedType;
    }

    return (BLangType) type.apply(this);
}
/**
 * Creates a BLang type node for a built-in type descriptor. Value types (int, string, …)
 * become BLangValueType nodes; all other kinds become built-in reference type nodes.
 * Returns null for a `var` type descriptor — the caller treats that as declared-with-var.
 */
private BLangType createBuiltInTypeNode(Node type) {
    String typeText;
    if (type.kind() == SyntaxKind.NIL_TYPE_DESC) {
        typeText = "()";
    } else if (type instanceof BuiltinSimpleNameReferenceNode) {
        BuiltinSimpleNameReferenceNode simpleNameRef = (BuiltinSimpleNameReferenceNode) type;
        if (simpleNameRef.kind() == SyntaxKind.VAR_TYPE_DESC) {
            return null;
        } else if (simpleNameRef.name().isMissing()) {
            // Recovered (missing) names become user-defined types with a synthetic name.
            String name = missingNodesHelper.getNextMissingNodeName(packageID);
            BLangIdentifier identifier = createIdentifier(getPosition(simpleNameRef.name()), name);
            BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
            return createUserDefinedType(getPosition(type), pkgAlias, identifier);
        }
        typeText = simpleNameRef.name().text();
    } else {
        typeText = ((Token) type).text(); // TODO: Remove this once map<string> returns Nodes for `map`
    }

    TypeKind typeKind = TreeUtils.stringToTypeKind(typeText.replaceAll("\\s+", ""));

    SyntaxKind kind = type.kind();
    switch (kind) {
        // These kinds map onto plain value types.
        case BOOLEAN_TYPE_DESC:
        case INT_TYPE_DESC:
        case BYTE_TYPE_DESC:
        case FLOAT_TYPE_DESC:
        case DECIMAL_TYPE_DESC:
        case STRING_TYPE_DESC:
        case ANY_TYPE_DESC:
        case NIL_TYPE_DESC:
        case HANDLE_TYPE_DESC:
        case ANYDATA_TYPE_DESC:
        case READONLY_TYPE_DESC:
            BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            valueType.typeKind = typeKind;
            valueType.pos = getPosition(type);
            return valueType;
        default:
            // Everything else (json, xml, …) is a built-in reference type.
            BLangBuiltInRefTypeNode builtInValueType =
                    (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
            builtInValueType.typeKind = typeKind;
            builtInValueType.pos = getPosition(type);
            return builtInValueType;
    }
}
/**
 * Creates a simple variable node carrying only a name and an optional initializer,
 * with no type descriptor attached.
 */
private VariableNode createBasicVarNodeWithoutType(Location location, Set<Whitespace> ws,
                                                   String identifier, Location identifierLocation,
                                                   ExpressionNode expr) {
    BLangSimpleVariable simpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    simpleVar.pos = location;
    IdentifierNode name = this.createIdentifier(identifierLocation, identifier, ws);
    ((BLangIdentifier) name).pos = identifierLocation;
    simpleVar.setName(name);
    simpleVar.addWS(ws);
    if (expr != null) {
        simpleVar.setInitialExpression(expr);
    }
    return simpleVar;
}
/**
 * Creates an invocation node (or an async action invocation when {@code isAsync} is set)
 * for the given function name and argument list.
 */
private BLangInvocation createBLangInvocation(Node nameNode, NodeList<FunctionArgumentNode> arguments,
                                              Location position, boolean isAsync) {
    BLangInvocation invocation = isAsync
            ? (BLangInvocation) TreeBuilder.createActionInvocation()
            : (BLangInvocation) TreeBuilder.createInvocationNode();

    BLangNameReference reference = createBLangNameReference(nameNode);
    invocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
    invocation.name = (BLangIdentifier) reference.name;

    List<BLangExpression> args = new ArrayList<>();
    arguments.forEach(arg -> args.add(createExpression(arg)));
    invocation.argExprs = args;
    invocation.pos = position;
    return invocation;
}
/**
 * Builds a name reference (package alias + name) from a name node. Qualified references
 * keep their module prefix as the alias; every other node kind produces an empty alias
 * and resolves down to a single name token.
 */
private BLangNameReference createBLangNameReference(Node node) {
    if (node.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        QualifiedNameReferenceNode qualifiedName = (QualifiedNameReferenceNode) node;
        Token modulePrefix = qualifiedName.modulePrefix();
        IdentifierToken identifier = qualifiedName.identifier();
        BLangIdentifier pkgAlias = this.createIdentifier(getPosition(modulePrefix), modulePrefix);
        Location namePos = getPosition(identifier);
        BLangIdentifier name = this.createIdentifier(namePos, identifier);
        return new BLangNameReference(getPosition(node), null, pkgAlias, name);
    }

    switch (node.kind()) {
        case ERROR_TYPE_DESC:
            node = ((BuiltinSimpleNameReferenceNode) node).name();
            break;
        case NEW_KEYWORD:
        case IDENTIFIER_TOKEN:
        case ERROR_KEYWORD:
            // Already a plain token; use it as-is.
            break;
        case SIMPLE_NAME_REFERENCE:
        default:
            node = ((SimpleNameReferenceNode) node).name();
            break;
    }

    Token nameToken = (Token) node;
    BLangIdentifier pkgAlias = this.createIdentifier(symTable.builtinPos, "");
    BLangIdentifier name = this.createIdentifier(nameToken);
    return new BLangNameReference(getPosition(node), null, pkgAlias, name);
}
private BLangMarkdownDocumentation createMarkdownDocumentationAttachment(Optional<Node> markdownDocumentationNode) {
if (markdownDocumentationNode == null || !markdownDocumentationNode.isPresent()) {
return null;
}
BLangMarkdownDocumentation doc = (BLangMarkdownDocumentation) TreeBuilder.createMarkdownDocumentationNode();
LinkedList<BLangMarkdownDocumentationLine> documentationLines = new LinkedList<>();
LinkedList<BLangMarkdownParameterDocumentation> parameters = new LinkedList<>();
LinkedList<BLangMarkdownReferenceDocumentation> references = new LinkedList<>();
MarkdownDocumentationNode markdownDocNode = (MarkdownDocumentationNode) markdownDocumentationNode.get();
NodeList<Node> docLineList = markdownDocNode.documentationLines();
BLangMarkdownParameterDocumentation bLangParaDoc = null;
BLangMarkdownReturnParameterDocumentation bLangReturnParaDoc = null;
BLangMarkDownDeprecationDocumentation bLangDeprecationDoc = null;
BLangMarkDownDeprecatedParametersDocumentation bLangDeprecatedParaDoc = null;
for (Node singleDocLine : docLineList) {
switch (singleDocLine.kind()) {
case MARKDOWN_DOCUMENTATION_LINE:
case MARKDOWN_REFERENCE_DOCUMENTATION_LINE:
MarkdownDocumentationLineNode docLineNode = (MarkdownDocumentationLineNode) singleDocLine;
NodeList<Node> docElements = docLineNode.documentElements();
String docText = addReferencesAndReturnDocumentationText(references, docElements);
if (bLangDeprecationDoc != null) {
bLangDeprecationDoc.deprecationDocumentationLines.add(docText);
} else if (bLangReturnParaDoc != null) {
bLangReturnParaDoc.returnParameterDocumentationLines.add(docText);
} else if (bLangParaDoc != null) {
bLangParaDoc.parameterDocumentationLines.add(docText);
} else {
BLangMarkdownDocumentationLine bLangDocLine =
(BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();
bLangDocLine.text = docText;
bLangDocLine.pos = getPosition(docLineNode);
documentationLines.add(bLangDocLine);
}
break;
case MARKDOWN_PARAMETER_DOCUMENTATION_LINE:
bLangParaDoc = new BLangMarkdownParameterDocumentation();
MarkdownParameterDocumentationLineNode parameterDocLineNode =
(MarkdownParameterDocumentationLineNode) singleDocLine;
BLangIdentifier paraName = new BLangIdentifier();
Token parameterName = parameterDocLineNode.parameterName();
String parameterNameValue = parameterName.isMissing() ? "" :
IdentifierUtils.unescapeUnicodeCodepoints(parameterName.text());
if (stringStartsWithSingleQuote(parameterNameValue)) {
parameterNameValue = parameterNameValue.substring(1);
}
paraName.value = parameterNameValue;
bLangParaDoc.parameterName = paraName;
NodeList<Node> paraDocElements = parameterDocLineNode.documentElements();
String paraDocText = addReferencesAndReturnDocumentationText(references, paraDocElements);
bLangParaDoc.parameterDocumentationLines.add(paraDocText);
bLangParaDoc.pos = getPosition(parameterName);
if (bLangDeprecatedParaDoc != null) {
bLangDeprecatedParaDoc.parameters.add(bLangParaDoc);
} else if (bLangDeprecationDoc != null) {
bLangDeprecatedParaDoc =
new BLangMarkDownDeprecatedParametersDocumentation();
bLangDeprecatedParaDoc.parameters.add(bLangParaDoc);
bLangDeprecationDoc = null;
} else {
parameters.add(bLangParaDoc);
}
break;
case MARKDOWN_RETURN_PARAMETER_DOCUMENTATION_LINE:
bLangReturnParaDoc = new BLangMarkdownReturnParameterDocumentation();
MarkdownParameterDocumentationLineNode returnParaDocLineNode =
(MarkdownParameterDocumentationLineNode) singleDocLine;
NodeList<Node> returnParaDocElements = returnParaDocLineNode.documentElements();
String returnParaDocText =
addReferencesAndReturnDocumentationText(references, returnParaDocElements);
bLangReturnParaDoc.returnParameterDocumentationLines.add(returnParaDocText);
bLangReturnParaDoc.pos = getPosition(returnParaDocLineNode);
doc.returnParameter = bLangReturnParaDoc;
break;
case MARKDOWN_DEPRECATION_DOCUMENTATION_LINE:
bLangDeprecationDoc = new BLangMarkDownDeprecationDocumentation();
MarkdownDocumentationLineNode deprecationDocLineNode =
(MarkdownDocumentationLineNode) singleDocLine;
String lineText = ((Token) deprecationDocLineNode.documentElements().get(0)).text();
bLangDeprecationDoc.addDeprecationLine("
bLangDeprecationDoc.pos = getPosition(deprecationDocLineNode);
break;
case MARKDOWN_CODE_BLOCK:
MarkdownCodeBlockNode codeBlockNode = (MarkdownCodeBlockNode) singleDocLine;
transformCodeBlock(documentationLines, codeBlockNode);
break;
default:
break;
}
}
doc.documentationLines = documentationLines;
doc.parameters = parameters;
doc.references = references;
doc.deprecationDocumentation = bLangDeprecationDoc;
doc.deprecatedParametersDocumentation = bLangDeprecatedParaDoc;
doc.pos = getPosition(markdownDocNode);
return doc;
}
/**
 * Renders a markdown code block (backticks, optional language attribute, code lines)
 * back into a single documentation-line node and appends it to the given list.
 */
private void transformCodeBlock(LinkedList<BLangMarkdownDocumentationLine> documentationLines,
                                MarkdownCodeBlockNode codeBlockNode) {
    StringBuilder docText = new StringBuilder();

    if (codeBlockNode.langAttribute().isPresent()) {
        docText.append(codeBlockNode.startBacktick().text());
        docText.append(codeBlockNode.langAttribute().get().toString());
    } else {
        docText.append(codeBlockNode.startBacktick().toString());
    }
    codeBlockNode.codeLines().forEach(codeLine -> docText.append(codeLine.codeDescription().toString()));
    docText.append(codeBlockNode.endBacktick().text());

    BLangMarkdownDocumentationLine docLine =
            (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();
    docLine.text = docText.toString();
    // The line's position is anchored at the leading '#' of the code block.
    docLine.pos = getPosition(codeBlockNode.startLineHashToken());
    documentationLines.add(docLine);
}
/**
 * Concatenates the text of a documentation line's elements, collecting any backtick
 * name references into {@code references} along the way. Returns the combined text with
 * at most one leading whitespace character stripped.
 */
private String addReferencesAndReturnDocumentationText(LinkedList<BLangMarkdownReferenceDocumentation> references,
                                                       NodeList<Node> docElements) {
    StringBuilder docText = new StringBuilder();
    for (Node element : docElements) {
        if (element.kind() == SyntaxKind.BALLERINA_NAME_REFERENCE) {
            BLangMarkdownReferenceDocumentation bLangRefDoc = new BLangMarkdownReferenceDocumentation();
            BallerinaNameReferenceNode balNameRefNode = (BallerinaNameReferenceNode) element;

            bLangRefDoc.pos = getPosition(balNameRefNode);

            Token startBacktick = balNameRefNode.startBacktick();
            Node backtickContent = balNameRefNode.nameReference();
            Token endBacktick = balNameRefNode.endBacktick();

            String contentString = backtickContent.isMissing() ? "" : backtickContent.toString();
            bLangRefDoc.referenceName = contentString;

            // Default to plain backtick content; an explicit keyword (type/function/…)
            // before the backtick refines the reference type below.
            bLangRefDoc.type = DocumentationReferenceType.BACKTICK_CONTENT;
            Optional<Token> referenceType = balNameRefNode.referenceType();
            referenceType.ifPresent(
                    refType -> {
                        bLangRefDoc.type = stringToRefType(refType.text());
                        docText.append(refType.toString());
                    }
            );

            transformDocumentationBacktickContent(backtickContent, bLangRefDoc);

            docText.append(startBacktick.isMissing() ? "" : startBacktick.text());
            docText.append(contentString);
            docText.append(endBacktick.isMissing() ? "" : endBacktick.text());
            references.add(bLangRefDoc);
        } else if (element.kind() == SyntaxKind.DOCUMENTATION_DESCRIPTION) {
            Token docDescription = (Token) element;
            docText.append(docDescription.text());
        } else if (element.kind() == SyntaxKind.INLINE_CODE_REFERENCE) {
            InlineCodeReferenceNode inlineCodeRefNode = (InlineCodeReferenceNode) element;
            docText.append(inlineCodeRefNode.startBacktick().text());
            docText.append(inlineCodeRefNode.codeReference().text());
            docText.append(inlineCodeRefNode.endBacktick().text());
        }
    }

    return trimLeftAtMostOne(docText.toString());
}
/**
 * Strips at most one leading whitespace character from the given text, so that further
 * indentation inside documentation lines is preserved.
 */
private String trimLeftAtMostOne(String text) {
    if (!text.isEmpty() && Character.isWhitespace(text.charAt(0))) {
        return text.substring(1);
    }
    return text;
}
/**
 * Extracts qualifier / identifier / type-name parts from a documentation backtick
 * reference, depending on the reference's syntactic shape (plain name, qualified name,
 * function call, or method call), and normalizes unicode escapes and quote prefixes.
 *
 * @throws IllegalArgumentException for an unexpected backtick content kind
 */
private void transformDocumentationBacktickContent(Node backtickContent,
                                                   BLangMarkdownReferenceDocumentation bLangRefDoc) {
    QualifiedNameReferenceNode qualifiedRef;
    SimpleNameReferenceNode simpleRef;

    switch (backtickContent.kind()) {
        case CODE_CONTENT:
            // Parser could not interpret the content as a reference; flag it and move on.
            bLangRefDoc.hasParserWarnings = true;
            break;
        case QUALIFIED_NAME_REFERENCE:
            qualifiedRef = (QualifiedNameReferenceNode) backtickContent;
            bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
            bLangRefDoc.identifier = qualifiedRef.identifier().text();
            break;
        case SIMPLE_NAME_REFERENCE:
            simpleRef = (SimpleNameReferenceNode) backtickContent;
            bLangRefDoc.identifier = simpleRef.name().text();
            break;
        case FUNCTION_CALL:
            // e.g. `foo()` or `mod:foo()` — take the (possibly qualified) function name.
            Node funcName = (((FunctionCallExpressionNode) backtickContent).functionName());
            if (funcName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                qualifiedRef = (QualifiedNameReferenceNode) funcName;
                bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                bLangRefDoc.identifier = qualifiedRef.identifier().text();
            } else {
                simpleRef = (SimpleNameReferenceNode) funcName;
                bLangRefDoc.identifier = simpleRef.name().text();
            }
            break;
        case METHOD_CALL:
            // e.g. `T.foo()` or `mod:T.foo()` — method name is the identifier, the
            // receiver contributes type name (and optionally qualifier).
            MethodCallExpressionNode methodCallExprNode = (MethodCallExpressionNode) backtickContent;
            bLangRefDoc.identifier =
                    ((SimpleNameReferenceNode) methodCallExprNode.methodName()).name().text();
            Node refName = methodCallExprNode.expression();
            if (refName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                qualifiedRef = (QualifiedNameReferenceNode) refName;
                bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                bLangRefDoc.typeName = qualifiedRef.identifier().text();
            } else {
                simpleRef = (SimpleNameReferenceNode) refName;
                bLangRefDoc.typeName = simpleRef.name().text();
            }
            break;
        default:
            throw new IllegalArgumentException("Invalid backtick content transformation");
    }
    if (bLangRefDoc.identifier != null) {
        bLangRefDoc.identifier = IdentifierUtils.unescapeUnicodeCodepoints(bLangRefDoc.identifier);
        if (stringStartsWithSingleQuote(bLangRefDoc.identifier)) {
            bLangRefDoc.identifier = bLangRefDoc.identifier.substring(1);
        }
    }
    if (bLangRefDoc.qualifier != null) {
        bLangRefDoc.qualifier = IdentifierUtils.unescapeUnicodeCodepoints(bLangRefDoc.qualifier);
        if (stringStartsWithSingleQuote(bLangRefDoc.qualifier)) {
            bLangRefDoc.qualifier = bLangRefDoc.qualifier.substring(1);
        }
    }
}
/**
 * Maps a documentation reference keyword (e.g. {@code type}, {@code function}) to its
 * reference type; unknown keywords fall back to plain backtick content.
 */
private DocumentationReferenceType stringToRefType(String refTypeName) {
    if ("type".equals(refTypeName)) {
        return DocumentationReferenceType.TYPE;
    }
    if ("service".equals(refTypeName)) {
        return DocumentationReferenceType.SERVICE;
    }
    if ("variable".equals(refTypeName)) {
        return DocumentationReferenceType.VARIABLE;
    }
    if ("var".equals(refTypeName)) {
        return DocumentationReferenceType.VAR;
    }
    if ("annotation".equals(refTypeName)) {
        return DocumentationReferenceType.ANNOTATION;
    }
    if ("module".equals(refTypeName)) {
        return DocumentationReferenceType.MODULE;
    }
    if ("function".equals(refTypeName)) {
        return DocumentationReferenceType.FUNCTION;
    }
    if ("parameter".equals(refTypeName)) {
        return DocumentationReferenceType.PARAMETER;
    }
    if ("const".equals(refTypeName)) {
        return DocumentationReferenceType.CONST;
    }
    return DocumentationReferenceType.BACKTICK_CONTENT;
}
/**
 * Parses a decimal or hexadecimal integer literal token into a Long. On overflow,
 * {@code parseLong} logs a diagnostic and the raw text is returned instead; any other
 * token kind yields null.
 */
private Object getIntegerLiteral(Node literal, String nodeValue, SyntaxKind sign) {
    SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind();
    switch (literalTokenKind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
            return parseLong(literal, nodeValue, nodeValue, 10, sign, DiagnosticErrorCode.INTEGER_TOO_SMALL,
                    DiagnosticErrorCode.INTEGER_TOO_LARGE);
        case HEX_INTEGER_LITERAL_TOKEN:
            // Drop the 0x/0X prefix before parsing in radix 16.
            String processedNodeValue = nodeValue.toLowerCase().replace("0x", "");
            return parseLong(literal, nodeValue, processedNodeValue, 16, sign,
                    DiagnosticErrorCode.HEXADECIMAL_TOO_SMALL, DiagnosticErrorCode.HEXADECIMAL_TOO_LARGE);
        default:
            return null;
    }
}
/**
 * Parses the processed literal text as a long in the given radix. On failure, a
 * too-small ({@code code1}, for negative literals) or too-large ({@code code2})
 * diagnostic is logged and the original source text is returned instead of a Long.
 */
private Object parseLong(Node literal, String originalNodeValue,
                         String processedNodeValue, int radix, SyntaxKind sign,
                         DiagnosticCode code1, DiagnosticCode code2) {
    try {
        return Long.parseLong(processedNodeValue, radix);
    } catch (Exception e) {
        Location pos = getPosition(literal);
        if (sign != SyntaxKind.MINUS_TOKEN) {
            dlog.error(pos, code2, originalNodeValue);
        } else {
            // Widen the reported range one column left so it covers the minus sign.
            pos = new BLangDiagnosticLocation(pos.lineRange().filePath(),
                    pos.lineRange().startLine().line(),
                    pos.lineRange().endLine().line(),
                    pos.lineRange().startLine().offset() - 1,
                    pos.lineRange().endLine().offset());
            dlog.error(pos, code1, originalNodeValue);
        }
        return originalNodeValue;
    }
}
/** Appends a default binary exponent ({@code p0}) to hex float literals that lack one. */
private String getHexNodeValue(String value) {
    boolean hasExponent = value.contains("p") || value.contains("P");
    return hasExponent ? value : value + "p0";
}
/** Left-pads the given string with '0' characters up to a width of four. */
private String fillWithZeros(String str) {
    StringBuilder padded = new StringBuilder(str);
    while (padded.length() < 4) {
        padded.insert(0, '0');
    }
    return padded.toString();
}
/**
 * Adds the given flag to a variable and, for structured binding patterns (tuple, record,
 * and error variables), propagates it recursively to every nested member variable.
 */
private void markVariableWithFlag(BLangVariable variable, Flag flag) {
    variable.flagSet.add(flag);

    switch (variable.getKind()) {
        case TUPLE_VARIABLE:
            BLangTupleVariable tupleVar = (BLangTupleVariable) variable;
            tupleVar.memberVariables.forEach(member -> markVariableWithFlag(member, flag));
            if (tupleVar.restVariable != null) {
                markVariableWithFlag(tupleVar.restVariable, flag);
            }
            break;
        case RECORD_VARIABLE:
            BLangRecordVariable recordVar = (BLangRecordVariable) variable;
            recordVar.variableList.forEach(keyValue -> markVariableWithFlag(keyValue.getValue(), flag));
            if (recordVar.restParam != null) {
                markVariableWithFlag((BLangVariable) recordVar.restParam, flag);
            }
            break;
        case ERROR_VARIABLE:
            BLangErrorVariable errorVar = (BLangErrorVariable) variable;
            if (errorVar.message != null) {
                markVariableWithFlag(errorVar.message, flag);
            }
            if (errorVar.cause != null) {
                markVariableWithFlag(errorVar.cause, flag);
            }
            errorVar.detail.forEach(entry -> markVariableWithFlag(entry.valueBindingPattern, flag));
            if (errorVar.restDetail != null) {
                markVariableWithFlag(errorVar.restDetail, flag);
            }
            break;
    }
}
/** Returns true for literal kinds that map directly onto a simple BLang literal node. */
private boolean isSimpleLiteral(SyntaxKind syntaxKind) {
    return syntaxKind == SyntaxKind.STRING_LITERAL
            || syntaxKind == SyntaxKind.NUMERIC_LITERAL
            || syntaxKind == SyntaxKind.BOOLEAN_LITERAL
            || syntaxKind == SyntaxKind.NIL_LITERAL
            || syntaxKind == SyntaxKind.NULL_LITERAL;
}
/** Returns true when the given syntax kind is a type descriptor. */
static boolean isType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case RECORD_TYPE_DESC:
        case OBJECT_TYPE_DESC:
        case NIL_TYPE_DESC:
        case OPTIONAL_TYPE_DESC:
        case ARRAY_TYPE_DESC:
        case INT_TYPE_DESC:
        case BYTE_TYPE_DESC:
        case FLOAT_TYPE_DESC:
        case DECIMAL_TYPE_DESC:
        case STRING_TYPE_DESC:
        case BOOLEAN_TYPE_DESC:
        case XML_TYPE_DESC:
        case JSON_TYPE_DESC:
        case HANDLE_TYPE_DESC:
        case ANY_TYPE_DESC:
        case ANYDATA_TYPE_DESC:
        case NEVER_TYPE_DESC:
        case VAR_TYPE_DESC:
        case SERVICE_TYPE_DESC:
        case PARAMETERIZED_TYPE_DESC:
        case UNION_TYPE_DESC:
        case ERROR_TYPE_DESC:
        case STREAM_TYPE_DESC:
        case TABLE_TYPE_DESC:
        case FUNCTION_TYPE_DESC:
        case TUPLE_TYPE_DESC:
        case PARENTHESISED_TYPE_DESC:
        case READONLY_TYPE_DESC:
        case DISTINCT_TYPE_DESC:
        case INTERSECTION_TYPE_DESC:
        case SINGLETON_TYPE_DESC:
        case TYPE_REFERENCE_TYPE_DESC:
            return true;
        default:
            return false;
    }
}
/** Returns true when the given syntax kind is a numeric literal. */
private boolean isNumericLiteral(SyntaxKind syntaxKind) {
    // Idiom fix: a single-case switch collapses to a direct equality check.
    return syntaxKind == SyntaxKind.NUMERIC_LITERAL;
}
/** Returns true when the node is actually present in the source (not a NONE placeholder). */
private boolean isPresent(Node node) {
    return node.kind() != SyntaxKind.NONE;
}
/**
 * A type descriptor is anonymous unless it appears directly under a type definition
 * or a distinct type descriptor.
 */
private boolean checkIfAnonymous(Node node) {
    SyntaxKind parentKind = node.parent().kind();
    boolean isNamedContext = parentKind == SyntaxKind.DISTINCT_TYPE_DESC
            || parentKind == SyntaxKind.TYPE_DEFINITION;
    return !isNamedContext;
}
/**
 * Returns true when the given node (or any of its ancestors) is a statement,
 * i.e. the node lives in a local rather than module-level context.
 */
private boolean ifInLocalContext(Node parent) {
    for (Node ancestor = parent; ancestor != null; ancestor = ancestor.parent()) {
        if (ancestor instanceof StatementNode) {
            return true;
        }
    }
    return false;
}
/**
 * Registers an anonymous record type as a generated, module-level type definition and
 * returns a user-defined type reference pointing at it.
 */
private BLangType createAnonymousRecordType(RecordTypeDescriptorNode recordTypeDescriptorNode,
                                            BLangRecordTypeNode recordTypeNode) {
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    Location pos = getPosition(recordTypeDescriptorNode);
    // Generated definitions get a synthetic, module-unique name.
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(this.packageID);
    IdentifierNode anonTypeGenName = createIdentifier(symTable.builtinPos, genName, null);
    typeDef.setName(anonTypeGenName);
    typeDef.flagSet.add(Flag.PUBLIC);
    typeDef.flagSet.add(Flag.ANONYMOUS);
    typeDef.typeNode = recordTypeNode;
    typeDef.pos = pos;
    addToTop(typeDef);
    return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), typeDef.name);
}
/** Creates a user-defined type node with the given package alias and type name. */
private BLangUserDefinedType createUserDefinedType(Location pos,
                                                   BLangIdentifier pkgAlias,
                                                   BLangIdentifier name) {
    BLangUserDefinedType typeRef = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    typeRef.pos = pos;
    typeRef.pkgAlias = pkgAlias;
    typeRef.typeName = name;
    return typeRef;
}
/** Returns true when the value is a Long within the byte range [0, 255]. */
private boolean withinByteRange(Object num) {
    if (!(num instanceof Long)) {
        return false;
    }
    long value = (Long) num;
    return value >= 0 && value <= 255;
}
/**
 * Fluent builder for {@link BLangSimpleVariable} nodes. All setters return {@code this}
 * so calls can be chained before {@link #build()}.
 */
private class SimpleVarBuilder {
    private BLangIdentifier name;
    private BLangType type;
    private boolean isDeclaredWithVar;
    private Set<Flag> flags = new HashSet<>();
    private boolean isFinal;
    private ExpressionNode expr;
    private Location pos;

    /** Assembles the variable node from the accumulated state. */
    public BLangSimpleVariable build() {
        BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        bLSimpleVar.setName(this.name);
        // Fix: the type node was previously set twice with the same value; once suffices.
        bLSimpleVar.setTypeNode(this.type);
        bLSimpleVar.isDeclaredWithVar = this.isDeclaredWithVar;
        bLSimpleVar.flagSet.addAll(this.flags);
        if (this.isFinal) {
            // FINAL is propagated into nested binding patterns as well.
            markVariableWithFlag(bLSimpleVar, Flag.FINAL);
        }
        bLSimpleVar.setInitialExpression(this.expr);
        bLSimpleVar.pos = pos;
        return bLSimpleVar;
    }

    /** Sets the variable name from a raw string, with no position. */
    public SimpleVarBuilder with(String name) {
        this.name = createIdentifier(null, name);
        return this;
    }

    /** Sets the variable name from a raw string at the given position. */
    public SimpleVarBuilder with(String name, Location identifierPos) {
        this.name = createIdentifier(identifierPos, name);
        return this;
    }

    /** Sets the variable name from a name token. */
    public SimpleVarBuilder with(Token token) {
        this.name = createIdentifier(token);
        return this;
    }

    /** Sets the type from a type descriptor node; null or `var` marks declared-with-var. */
    public SimpleVarBuilder setTypeByNode(Node typeName) {
        this.isDeclaredWithVar = typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC;
        if (typeName == null) {
            return this;
        }
        this.type = createTypeNode(typeName);
        return this;
    }

    /** Sets the initializer from a syntax node, or clears it when null. */
    public SimpleVarBuilder setExpressionByNode(Node initExprNode) {
        this.expr = initExprNode != null ? createExpression(initExprNode) : null;
        return this;
    }

    /** Sets the initializer from an already-built expression. */
    public SimpleVarBuilder setExpression(ExpressionNode expression) {
        this.expr = expression;
        return this;
    }

    public SimpleVarBuilder isDeclaredWithVar() {
        this.isDeclaredWithVar = true;
        return this;
    }

    public SimpleVarBuilder isFinal() {
        this.isFinal = true;
        return this;
    }

    /** Marks the variable as a listener; listeners are implicitly final. */
    public SimpleVarBuilder isListenerVar() {
        this.flags.add(Flag.LISTENER);
        this.flags.add(Flag.FINAL);
        return this;
    }

    /** Maps a private/public visibility qualifier token onto the flag set, if present. */
    public SimpleVarBuilder setVisibility(Token visibilityQualifier) {
        if (visibilityQualifier != null) {
            if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
                this.flags.add(Flag.PRIVATE);
            } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
                this.flags.add(Flag.PUBLIC);
            }
        }
        return this;
    }

    public SimpleVarBuilder setFinal(boolean present) {
        this.isFinal = present;
        return this;
    }

    // NOTE(review): "optional" toggles the PUBLIC flag here — looks surprising; confirm
    // against callers before changing.
    public SimpleVarBuilder setOptional(boolean present) {
        if (present) {
            this.flags.add(Flag.PUBLIC);
        } else {
            this.flags.remove(Flag.PUBLIC);
        }
        return this;
    }

    public SimpleVarBuilder setRequired(boolean present) {
        if (present) {
            this.flags.add(Flag.REQUIRED);
        } else {
            this.flags.remove(Flag.REQUIRED);
        }
        return this;
    }

    public SimpleVarBuilder isPublic() {
        this.flags.add(Flag.PUBLIC);
        return this;
    }

    public SimpleVarBuilder isWorkerVar() {
        this.flags.add(Flag.WORKER);
        return this;
    }

    public SimpleVarBuilder setPos(Location pos) {
        this.pos = pos;
        return this;
    }
}
/** Marks the given simple variable as final. */
private void addFinalQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.FINAL);
}
/** Adds a top-level node to the current compilation unit, if one is being built. */
private void addToTop(TopLevelNode topLevelNode) {
    if (currentCompilationUnit == null) {
        return;
    }
    currentCompilationUnit.addTopLevelNode(topLevelNode);
}
/**
 * Widens {@code location} so it starts where {@code upTo} starts, keeping its original end.
 * Precondition (asserted): {@code location} must not start before {@code upTo}.
 */
private Location expandLeft(Location location, Location upTo) {
    LinePosition newStart = upTo.lineRange().startLine();
    LinePosition end = location.lineRange().endLine();
    assert location.lineRange().startLine().line() > newStart.line()
            || (location.lineRange().startLine().line() == newStart.line()
            && location.lineRange().startLine().offset() >= newStart.offset());
    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            newStart.line(), end.line(), newStart.offset(), end.offset());
}
/**
 * Narrows {@code location} so it starts where {@code upTo} starts, keeping its original end.
 * Precondition (asserted): {@code location} must not start after {@code upTo}.
 */
private Location trimLeft(Location location, Location upTo) {
    LinePosition newStart = upTo.lineRange().startLine();
    LinePosition end = location.lineRange().endLine();
    assert location.lineRange().startLine().line() < newStart.line()
            || (location.lineRange().startLine().line() == newStart.line()
            && location.lineRange().startLine().offset() <= newStart.offset());
    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            newStart.line(), end.line(), newStart.offset(), end.offset());
}
/**
 * Narrows {@code location} so it ends where {@code upTo} ends, keeping its original start.
 * Precondition (asserted): {@code location} must not end before {@code upTo}.
 */
private Location trimRight(Location location, Location upTo) {
    LinePosition start = location.lineRange().startLine();
    LinePosition newEnd = upTo.lineRange().endLine();
    assert location.lineRange().endLine().line() > newEnd.line()
            || (location.lineRange().endLine().line() == newEnd.line()
            && location.lineRange().endLine().offset() >= newEnd.offset());
    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            start.line(), newEnd.line(), start.offset(), newEnd.offset());
}
/**
 * Copies class qualifiers (distinct/client/readonly/service/isolated) onto the class
 * definition's flag set.
 *
 * @throws RuntimeException for a qualifier token this transformer does not recognise
 */
private void setClassQualifiers(NodeList<Token> qualifiers, BLangClassDefinition blangClass) {
    for (Token qualifier : qualifiers) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.DISTINCT_KEYWORD) {
            blangClass.flagSet.add(Flag.DISTINCT);
        } else if (kind == SyntaxKind.CLIENT_KEYWORD) {
            blangClass.flagSet.add(Flag.CLIENT);
        } else if (kind == SyntaxKind.READONLY_KEYWORD) {
            blangClass.flagSet.add(Flag.READONLY);
        } else if (kind == SyntaxKind.SERVICE_KEYWORD) {
            blangClass.flagSet.add(Flag.SERVICE);
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            blangClass.flagSet.add(Flag.ISOLATED);
        } else {
            throw new RuntimeException("Syntax kind is not supported: " + kind);
        }
    }
}
}

class BLangNodeTransformer extends NodeTransformer<BLangNode> {
private static final String IDENTIFIER_LITERAL_PREFIX = "'";
// Diagnostic logger used to report errors found while transforming.
private BLangDiagnosticLog dlog;
private SymbolTable symTable;
private PackageCache packageCache;
private PackageID packageID;
// Name of the compilation unit (source file) currently being transformed.
private String currentCompUnitName;
private BLangCompilationUnit currentCompilationUnit;
// Generates names for anonymous types introduced during transformation.
private BLangAnonymousModelHelper anonymousModelHelper;
private BLangMissingNodesHelper missingNodesHelper;
/* To keep track of additional statements produced from multi-BLangNode resultant transformations */
private Stack<BLangStatement> additionalStatements = new Stack<>();
/* To keep track if we are inside a block statement for the use of type definition creation */
private boolean isInLocalContext = false;
/**
 * Creates a transformer for one compilation unit.
 *
 * @param context   compiler context used to look up shared singletons
 * @param packageID package the compilation unit belongs to
 * @param entryName name of the compilation unit being transformed
 */
public BLangNodeTransformer(CompilerContext context,
                            PackageID packageID, String entryName) {
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.dlog.setCurrentPackageId(packageID);
    this.symTable = SymbolTable.getInstance(context);
    this.packageID = packageID;
    this.currentCompUnitName = entryName;
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
    this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
}
/**
 * Transforms {@code node} and returns the resulting BLang node preceded by any additional
 * statements the transformation pushed onto {@link #additionalStatements}.
 */
public List<org.ballerinalang.model.tree.Node> accept(Node node) {
    // Transform first: the visit may push extra statements that must precede the result.
    BLangNode transformed = node.apply(this);
    List<org.ballerinalang.model.tree.Node> resultNodes = new ArrayList<>();
    while (!additionalStatements.empty()) {
        resultNodes.add(additionalStatements.pop());
    }
    resultNodes.add(transformed);
    return resultNodes;
}
@Override
public BLangNode transform(IdentifierToken identifierToken) {
    // An identifier token becomes a BLangIdentifier at the same position.
    return this.createIdentifier(getPosition(identifierToken), identifierToken);
}
// Returns the documentation attached to the metadata; null when metadata itself is absent.
// NOTE(review): map(...).orElse(null) yields the inner Optional<Node> or null, never
// Optional.empty() for missing metadata — callers must handle null.
private Optional<Node> getDocumentationString(Optional<MetadataNode> metadataNode) {
    return metadataNode.map(MetadataNode::documentationString).orElse(null);
}
// Returns the annotations attached to the metadata; null when metadata itself is absent.
private NodeList<AnnotationNode> getAnnotations(Optional<MetadataNode> metadataNode) {
    return metadataNode.map(MetadataNode::annotations).orElse(null);
}
/**
 * Builds a diagnostic location covering {@code node} within the current compilation unit;
 * {@code null} when no node is given.
 */
private Location getPosition(Node node) {
    if (node == null) {
        return null;
    }
    LineRange range = node.lineRange();
    return new BLangDiagnosticLocation(currentCompUnitName,
            range.startLine().line(), range.endLine().line(),
            range.startLine().offset(), range.endLine().offset());
}
/**
 * Builds a diagnostic location spanning from the start of {@code startNode} to the end of
 * {@code endNode}; {@code null} when either endpoint is missing.
 */
private Location getPosition(Node startNode, Node endNode) {
    if (startNode == null || endNode == null) {
        return null;
    }
    LinePosition start = startNode.lineRange().startLine();
    LinePosition end = endNode.lineRange().endLine();
    return new BLangDiagnosticLocation(currentCompUnitName, start.line(), end.line(),
            start.offset(), end.offset());
}
/**
 * Like {@link #getPosition(Node)}, but when the node's first child is METADATA (attached
 * annotations/documentation) the returned range starts at the child after the metadata.
 */
private Location getPositionWithoutMetadata(Node node) {
    if (node == null) {
        return null;
    }
    ChildNodeList children = ((NonTerminalNode) node).children();
    // Skip a leading metadata child so docs/annotations are excluded from the position.
    LinePosition start = children.get(0).kind() == SyntaxKind.METADATA
            ? children.get(1).lineRange().startLine()
            : node.lineRange().startLine();
    LinePosition end = node.lineRange().endLine();
    return new BLangDiagnosticLocation(currentCompUnitName, start.line(), end.line(),
            start.offset(), end.offset());
}
/**
 * Transforms a whole module part (one source file) into a {@code BLangCompilationUnit},
 * visiting imports first and then every module-level member.
 */
@Override
public BLangNode transform(ModulePartNode modulePart) {
    BLangCompilationUnit compilationUnit = (BLangCompilationUnit) TreeBuilder.createCompilationUnit();
    this.currentCompilationUnit = compilationUnit;
    compilationUnit.name = currentCompUnitName;
    compilationUnit.setPackageID(packageID);
    Location pos = getPosition(modulePart);
    // Each import is tagged with the compilation unit it appears in.
    for (ImportDeclarationNode importDecl : modulePart.imports()) {
        BLangImportPackage bLangImport = (BLangImportPackage) importDecl.apply(this);
        bLangImport.compUnit = this.createIdentifier(pos, compilationUnit.getName());
        compilationUnit.addTopLevelNode(bLangImport);
    }
    for (ModuleMemberDeclarationNode member : modulePart.members()) {
        compilationUnit.addTopLevelNode((TopLevelNode) member.apply(this));
    }
    // The unit's own position is zeroed; individual top-level nodes carry real positions.
    Location newLocation = new BLangDiagnosticLocation(pos.lineRange().filePath(), 0, 0, 0, 0);
    compilationUnit.pos = newLocation;
    compilationUnit.setPackageID(packageID);
    this.currentCompilationUnit = null;
    return compilationUnit;
}
/**
 * Transforms a module-level variable declaration: builds the variable from its binding
 * pattern, applies visibility, type/initializer/qualifiers, annotations and doc string.
 */
@Override
public BLangNode transform(ModuleVariableDeclarationNode modVarDeclrNode) {
    TypedBindingPatternNode typedBindingPattern = modVarDeclrNode.typedBindingPattern();
    BindingPatternNode bindingPatternNode = typedBindingPattern.bindingPattern();
    BLangVariable variable = getBLangVariableNode(bindingPatternNode);
    if (modVarDeclrNode.visibilityQualifier().isPresent()) {
        markVariableWithFlag(variable, Flag.PUBLIC);
    }
    initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), modVarDeclrNode.initializer(),
            modVarDeclrNode.qualifiers());
    NodeList<AnnotationNode> annotations = getAnnotations(modVarDeclrNode.metadata());
    if (annotations != null) {
        variable.annAttachments = applyAll(annotations);
    }
    // Position excludes metadata so diagnostics point at the declaration itself.
    variable.pos = getPositionWithoutMetadata(modVarDeclrNode);
    variable.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(modVarDeclrNode.metadata()));
    return variable;
}
/**
 * Transforms an import declaration into a {@code BLangImportPackage}, resolving org name,
 * module name components and alias (the explicit prefix, or the last name component).
 */
@Override
public BLangNode transform(ImportDeclarationNode importDeclaration) {
    ImportOrgNameNode orgNameNode = importDeclaration.orgName().orElse(null);
    Optional<ImportPrefixNode> prefixNode = importDeclaration.prefix();
    Token prefix = prefixNode.isPresent() ? prefixNode.get().prefix() : null;
    Token orgName = null;
    if (orgNameNode != null) {
        orgName = orgNameNode.orgName();
    }
    // No version syntax is read here; the import's version identifier is always empty.
    String version = null;
    List<BLangIdentifier> pkgNameComps = new ArrayList<>();
    NodeList<IdentifierToken> names = importDeclaration.moduleName();
    Location position = getPosition(importDeclaration);
    names.forEach(name -> pkgNameComps.add(this.createIdentifier(getPosition(name), name.text(), null)));
    BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
    importDcl.pos = position;
    importDcl.pkgNameComps = pkgNameComps;
    importDcl.orgName = this.createIdentifier(getPosition(orgNameNode), orgName);
    importDcl.version = this.createIdentifier(null, version);
    // Alias defaults to the final module-name component when no explicit prefix is given.
    importDcl.alias = (prefix != null) ? this.createIdentifier(getPosition(prefix), prefix)
            : pkgNameComps.get(pkgNameComps.size() - 1);
    return importDcl;
}
/**
 * Transforms a method declaration (no body). Declarations with a relative resource path
 * become resource functions; all others become ordinary functions.
 */
@Override
public BLangNode transform(MethodDeclarationNode methodDeclarationNode) {
    BLangFunction bLFunction;
    if (methodDeclarationNode.relativeResourcePath().isEmpty()) {
        bLFunction = createFunctionNode(methodDeclarationNode.methodName(),
                methodDeclarationNode.qualifierList(), methodDeclarationNode.methodSignature(), null);
    } else {
        bLFunction = createResourceFunctionNode(methodDeclarationNode.methodName(),
                methodDeclarationNode.qualifierList(), methodDeclarationNode.relativeResourcePath(),
                methodDeclarationNode.methodSignature(), null);
    }
    bLFunction.annAttachments = applyAll(getAnnotations(methodDeclarationNode.metadata()));
    bLFunction.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(methodDeclarationNode.metadata()));
    bLFunction.pos = getPositionWithoutMetadata(methodDeclarationNode);
    return bLFunction;
}
/**
 * Transforms a resource path parameter into a simple variable. A rest path parameter
 * ({@code [T... x]}) gets its type wrapped in a one-dimensional array type.
 */
@Override
public BLangNode transform(ResourcePathParameterNode resourcePathParameterNode) {
    BLangSimpleVariable pathParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    pathParam.name = createIdentifier(resourcePathParameterNode.paramName());
    BLangType typeNode = (BLangType) resourcePathParameterNode.typeDescriptor().apply(this);
    pathParam.pos = getPosition(resourcePathParameterNode);
    pathParam.annAttachments = applyAll(resourcePathParameterNode.annotations());
    if (resourcePathParameterNode.kind() == SyntaxKind.RESOURCE_PATH_REST_PARAM) {
        // Rest path params collect remaining segments, so the declared type becomes T[].
        BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
        arrayTypeNode.elemtype = typeNode;
        arrayTypeNode.dimensions = 1;
        typeNode = arrayTypeNode;
    }
    pathParam.typeNode = typeNode;
    return pathParam;
}
/**
 * Builds a resource function from an accessor name and its relative resource path.
 * Path parameter segments are recorded as "*" (single) or "**" (rest) in the resource path,
 * and their variables are prepended to the function's parameter list.
 */
private BLangFunction createResourceFunctionNode(IdentifierToken accessorName,
                                                 NodeList<Token> qualifierList,
                                                 NodeList<Node> relativeResourcePath,
                                                 FunctionSignatureNode methodSignature,
                                                 FunctionBodyNode functionBody) {
    BLangResourceFunction bLFunction = (BLangResourceFunction) TreeBuilder.createResourceFunctionNode();
    // Internal name encodes accessor + path, e.g. "$get$foo$*".
    String resourceFuncName = calculateResourceFunctionName(accessorName, relativeResourcePath);
    BLangIdentifier name = createIdentifier(getPosition(accessorName), resourceFuncName);
    populateFunctionNode(name, qualifierList, methodSignature, functionBody, bLFunction);
    bLFunction.methodName = createIdentifier(accessorName);
    bLFunction.resourcePath = new ArrayList<>();
    List<BLangSimpleVariable> params = new ArrayList<>();
    for (Node pathSegment : relativeResourcePath) {
        switch (pathSegment.kind()) {
            case SLASH_TOKEN:
                // Separators contribute nothing to the path or parameters.
                continue;
            case RESOURCE_PATH_SEGMENT_PARAM:
                BLangSimpleVariable param = (BLangSimpleVariable) pathSegment.apply(this);
                params.add(param);
                bLFunction.addPathParam(param);
                bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "*"));
                break;
            case RESOURCE_PATH_REST_PARAM:
                BLangSimpleVariable restParam = (BLangSimpleVariable) pathSegment.apply(this);
                params.add(restParam);
                bLFunction.setRestPathParam(restParam);
                bLFunction.resourcePath.add(createIdentifier(getPosition(pathSegment), "**"));
                break;
            default:
                // A literal path segment becomes part of the resource path verbatim.
                bLFunction.resourcePath.add(createIdentifier((Token) pathSegment));
                break;
        }
    }
    // Path parameters precede the signature's declared parameters.
    bLFunction.getParameters().addAll(0, params);
    return bLFunction;
}
/**
 * Builds the internal resource function name: "$" + accessor, then "$" + each literal path
 * segment, "$*" for a path parameter, "$**" for a rest path parameter. Slashes are skipped.
 */
private String calculateResourceFunctionName(IdentifierToken accessorName, NodeList<Node> relativeResourcePath) {
    StringBuilder nameBuilder = new StringBuilder("$");
    nameBuilder.append(createIdentifier(accessorName).getValue());
    for (Node segment : relativeResourcePath) {
        SyntaxKind kind = segment.kind();
        if (kind == SyntaxKind.SLASH_TOKEN) {
            continue;
        }
        if (kind == SyntaxKind.RESOURCE_PATH_SEGMENT_PARAM) {
            nameBuilder.append("$*");
        } else if (kind == SyntaxKind.RESOURCE_PATH_REST_PARAM) {
            nameBuilder.append("$**");
        } else {
            nameBuilder.append("$").append(createIdentifier((Token) segment).getValue());
        }
    }
    return nameBuilder.toString();
}
/**
 * Transforms a constant declaration. For (numeric) literal initializers an anonymous finite
 * type definition holding exactly that value is generated and attached to the constant.
 */
@Override
public BLangNode transform(ConstantDeclarationNode constantDeclarationNode) {
    BLangConstant constantNode = (BLangConstant) TreeBuilder.createConstantNode();
    Location pos = getPositionWithoutMetadata(constantDeclarationNode);
    Location identifierPos = getPosition(constantDeclarationNode.variableName());
    constantNode.name = createIdentifier(identifierPos, constantDeclarationNode.variableName());
    constantNode.expr = createExpression(constantDeclarationNode.initializer());
    constantNode.pos = pos;
    if (constantDeclarationNode.typeDescriptor().isPresent()) {
        constantNode.typeNode = createTypeNode(constantDeclarationNode.typeDescriptor().orElse(null));
    }
    constantNode.annAttachments = applyAll(getAnnotations(constantDeclarationNode.metadata()));
    constantNode.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(constantDeclarationNode.metadata()));
    constantNode.flagSet.add(Flag.CONSTANT);
    if (constantDeclarationNode.visibilityQualifier().isPresent() &&
            constantDeclarationNode.visibilityQualifier().orElse(null).kind() == SyntaxKind.PUBLIC_KEYWORD) {
        constantNode.flagSet.add(Flag.PUBLIC);
    }
    NodeKind nodeKind = constantNode.expr.getKind();
    if (nodeKind == NodeKind.LITERAL || nodeKind == NodeKind.NUMERIC_LITERAL) {
        // Clone the literal so the finite type's value space is independent of the expr node.
        BLangLiteral literal = nodeKind == NodeKind.LITERAL ?
                (BLangLiteral) TreeBuilder.createLiteralExpression() :
                (BLangLiteral) TreeBuilder.createNumericLiteralExpression();
        literal.setValue(((BLangLiteral) constantNode.expr).value);
        literal.type = constantNode.expr.type;
        literal.isConstant = true;
        BLangFiniteTypeNode finiteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        finiteTypeNode.valueSpace.add(literal);
        BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
        String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
        IdentifierNode anonTypeGenName = createIdentifier(identifierPos, genName);
        typeDef.setName(anonTypeGenName);
        typeDef.flagSet.add(Flag.PUBLIC);
        typeDef.flagSet.add(Flag.ANONYMOUS);
        typeDef.typeNode = finiteTypeNode;
        typeDef.pos = pos;
        constantNode.associatedTypeDefinition = typeDef;
    }
    return constantNode;
}
/**
 * Transforms a module-level type definition into a {@code BLangTypeDefinition}, carrying
 * over name, documentation, the described type, visibility and annotations.
 */
@Override // added for consistency: every sibling transform(...) overload is annotated
public BLangNode transform(TypeDefinitionNode typeDefNode) {
    BLangTypeDefinition typeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    BLangIdentifier identifierNode =
            this.createIdentifier(typeDefNode.typeName());
    typeDef.setName(identifierNode);
    typeDef.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(typeDefNode.metadata()));
    typeDef.typeNode = createTypeNode(typeDefNode.typeDescriptor());
    typeDefNode.visibilityQualifier().ifPresent(visibilityQual -> {
        if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
            typeDef.flagSet.add(Flag.PUBLIC);
        }
    });
    typeDef.pos = getPositionWithoutMetadata(typeDefNode);
    typeDef.annAttachments = applyAll(getAnnotations(typeDefNode.metadata()));
    return typeDef;
}
/**
 * Transforms a union type descriptor. Singleton members are collected into one finite type;
 * all other members become union member types (source order preserved via reverseFlatMap).
 * A union of only singletons collapses to just the finite type node.
 */
@Override
public BLangNode transform(UnionTypeDescriptorNode unionTypeDescriptorNode) {
    List<TypeDescriptorNode> nodes = flattenUnionType(unionTypeDescriptorNode);
    List<TypeDescriptorNode> finiteTypeElements = new ArrayList<>();
    List<List<TypeDescriptorNode>> unionTypeElementsCollection = new ArrayList<>();
    for (TypeDescriptorNode type : nodes) {
        if (type.kind() == SyntaxKind.SINGLETON_TYPE_DESC) {
            finiteTypeElements.add(type);
            unionTypeElementsCollection.add(new ArrayList<>());
        } else {
            List<TypeDescriptorNode> lastOfOthers;
            if (unionTypeElementsCollection.isEmpty()) {
                lastOfOthers = new ArrayList<>();
                unionTypeElementsCollection.add(lastOfOthers);
            } else {
                lastOfOthers = unionTypeElementsCollection.get(unionTypeElementsCollection.size() - 1);
            }
            lastOfOthers.add(type);
        }
    }
    List<TypeDescriptorNode> unionElements = new ArrayList<>();
    reverseFlatMap(unionTypeElementsCollection, unionElements);
    BLangFiniteTypeNode bLangFiniteTypeNode = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
    for (TypeDescriptorNode finiteTypeEl : finiteTypeElements) {
        SingletonTypeDescriptorNode singletonTypeNode = (SingletonTypeDescriptorNode) finiteTypeEl;
        BLangLiteral literal = createSimpleLiteral(singletonTypeNode.simpleContExprNode(), true);
        bLangFiniteTypeNode.addValue(literal);
    }
    if (unionElements.isEmpty()) {
        // Union consisted solely of singleton members.
        return bLangFiniteTypeNode;
    }
    BLangUnionTypeNode unionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    unionTypeNode.pos = getPosition(unionTypeDescriptorNode);
    for (TypeDescriptorNode unionElement : unionElements) {
        unionTypeNode.memberTypeNodes.add(createTypeNode(unionElement));
    }
    if (!finiteTypeElements.isEmpty()) {
        // The combined finite type is hoisted to an anonymous type def and referenced here.
        unionTypeNode.memberTypeNodes.add(deSugarTypeAsUserDefType(bLangFiniteTypeNode));
    }
    return unionTypeNode;
}
/**
 * Returns the member type descriptors of a (possibly nested) union, flattened into one list.
 */
private List<TypeDescriptorNode> flattenUnionType(UnionTypeDescriptorNode unionTypeDescriptorNode) {
    List<TypeDescriptorNode> flattened = new ArrayList<>();
    flattenUnionType(flattened, unionTypeDescriptorNode);
    return flattened;
}
// Recursively splits a union descriptor into its non-union members, appending them to
// `list` in source order; a non-union descriptor is appended as-is.
private void flattenUnionType(List<TypeDescriptorNode> list, TypeDescriptorNode typeDescriptorNode) {
    if (typeDescriptorNode.kind() != SyntaxKind.UNION_TYPE_DESC) {
        list.add(typeDescriptorNode);
        return;
    }
    UnionTypeDescriptorNode unionTypeDescriptorNode = (UnionTypeDescriptorNode) typeDescriptorNode;
    updateListWithNonUnionTypes(list, unionTypeDescriptorNode.leftTypeDesc());
    updateListWithNonUnionTypes(list, unionTypeDescriptorNode.rightTypeDesc());
}
// Appends a non-union descriptor directly; recurses through flattenUnionType otherwise.
private void updateListWithNonUnionTypes(List<TypeDescriptorNode> list, TypeDescriptorNode typeDescNode) {
    if (typeDescNode.kind() == SyntaxKind.UNION_TYPE_DESC) {
        flattenUnionType(list, typeDescNode);
    } else {
        list.add(typeDescNode);
    }
}
/**
 * Appends the sub-lists of {@code listOfLists} to {@code result}, iterating the outer list
 * in reverse while keeping each sub-list's own element order.
 */
private <T> void reverseFlatMap(List<List<T>> listOfLists, List<T> result) {
    int index = listOfLists.size();
    while (index > 0) {
        result.addAll(listOfLists.get(--index));
    }
}
// Hoists `toIndirect` into an anonymous top-level type definition and returns a user-defined
// type reference pointing at it. Side effect: the generated definition is added to the
// current compilation unit via addToTop().
private BLangUserDefinedType deSugarTypeAsUserDefType(BLangType toIndirect) {
    BLangTypeDefinition bLTypeDef = createTypeDefinitionWithTypeNode(toIndirect);
    Location pos = toIndirect.pos;
    addToTop(bLTypeDef);
    return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), bLTypeDef.name);
}
/**
 * Wraps {@code toIndirect} in a fresh anonymous, public type definition positioned at the
 * wrapped type's own location.
 */
private BLangTypeDefinition createTypeDefinitionWithTypeNode(BLangType toIndirect) {
    BLangTypeDefinition typeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    String anonTypeName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    typeDefinition.setName(createIdentifier(symTable.builtinPos, anonTypeName));
    typeDefinition.flagSet.add(Flag.PUBLIC);
    typeDefinition.flagSet.add(Flag.ANONYMOUS);
    typeDefinition.typeNode = toIndirect;
    typeDefinition.pos = toIndirect.pos;
    return typeDefinition;
}
@Override
public BLangNode transform(ParenthesisedTypeDescriptorNode parenthesisedTypeDescriptorNode) {
    // A parenthesised type is its inner type, flagged as grouped.
    BLangType typeNode = createTypeNode(parenthesisedTypeDescriptorNode.typedesc());
    typeNode.grouped = true;
    return typeNode;
}
@Override
public BLangNode transform(TypeParameterNode typeParameterNode) {
    // A type parameter contributes only its wrapped type descriptor.
    return createTypeNode(typeParameterNode.typeNode());
}
/**
 * Transforms a tuple type descriptor; a {@code ...T} member becomes the tuple's rest
 * parameter type, all other members become ordinary member types.
 */
@Override
public BLangNode transform(TupleTypeDescriptorNode tupleTypeDescriptorNode) {
    BLangTupleTypeNode tupleTypeNode = (BLangTupleTypeNode) TreeBuilder.createTupleTypeNode();
    SeparatedNodeList<Node> memberTypes = tupleTypeDescriptorNode.memberTypeDesc();
    for (int index = 0; index < memberTypes.size(); index++) {
        Node member = memberTypes.get(index);
        if (member.kind() != SyntaxKind.REST_TYPE) {
            tupleTypeNode.memberTypeNodes.add(createTypeNode(member));
        } else {
            tupleTypeNode.restParamType = createTypeNode(((RestDescriptorNode) member).typeDescriptor());
        }
    }
    tupleTypeNode.pos = getPosition(tupleTypeDescriptorNode);
    return tupleTypeNode;
}
/**
 * Returns true when the given type parameter wraps an inline (anonymous) record, object or
 * error type descriptor.
 *
 * Fix: removed a stray {@code @Override} annotation — this is a private helper and private
 * methods can never override a supertype method, so the annotation did not compile.
 */
private boolean isAnonymousTypeNode(TypeParameterNode typeNode) {
    SyntaxKind paramKind = typeNode.typeNode().kind();
    if (paramKind == SyntaxKind.RECORD_TYPE_DESC || paramKind == SyntaxKind.OBJECT_TYPE_DESC
            || paramKind == SyntaxKind.ERROR_TYPE_DESC) {
        return checkIfAnonymous(typeNode);
    }
    return false;
}
@Override
public BLangNode transform(DistinctTypeDescriptorNode distinctTypeDesc) {
    // A distinct type is the wrapped type with the DISTINCT flag added.
    BLangType typeNode = createTypeNode(distinctTypeDesc.typeDescriptor());
    typeNode.flagSet.add(Flag.DISTINCT);
    return typeNode;
}
/**
 * Transforms an object type descriptor: applies qualifiers, sorts members into the init
 * function, attached functions, fields and type references, adjusts the position to the
 * member span, and hoists anonymous object types into named type definitions.
 */
@Override
public BLangNode transform(ObjectTypeDescriptorNode objTypeDescNode) {
    BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) TreeBuilder.createObjectTypeNode();
    for (Token qualifier : objTypeDescNode.objectTypeQualifiers()) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            objectTypeNode.flagSet.add(Flag.CLIENT);
            continue;
        }
        if (kind == SyntaxKind.SERVICE_KEYWORD) {
            objectTypeNode.flagSet.add(SERVICE);
            continue;
        }
        if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            objectTypeNode.flagSet.add(ISOLATED);
            continue;
        }
        throw new RuntimeException("Syntax kind is not supported: " + kind);
    }
    NodeList<Node> members = objTypeDescNode.members();
    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        if (bLangNode.getKind() == NodeKind.FUNCTION) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                // The first init method becomes the object's init function; any duplicate is
                // kept as an ordinary function (later phases report the error).
                if (objectTypeNode.initFunction == null) {
                    bLangFunction.objInitFunction = true;
                    objectTypeNode.initFunction = bLangFunction;
                } else {
                    objectTypeNode.addFunction(bLangFunction);
                }
            } else {
                objectTypeNode.addFunction(bLangFunction);
            }
        } else if (bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            // Resource functions are not allowed in object type definitions; keep the node
            // but log an error.
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            objectTypeNode.addFunction(bLangFunction);
            dlog.error(getPosition(node), DiagnosticErrorCode.OBJECT_TYPE_DEF_DOES_NOT_ALLOW_RESOURCE_FUNC_DECL);
        } else if (bLangNode.getKind() == NodeKind.VARIABLE) {
            objectTypeNode.addField((BLangSimpleVariable) bLangNode);
        } else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            objectTypeNode.addTypeReference((BLangType) bLangNode);
        }
    }
    objectTypeNode.pos = getPosition(objTypeDescNode);
    if (members.size() > 0) {
        // Narrow the position to the span covered by the members.
        objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(members.get(0)));
        objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(members.get(members.size() - 1)));
    } else {
        // NOTE(review): trims the left edge with closeBrace and the right edge with
        // openBrace — this looks inverted; confirm the intended empty-body span.
        objectTypeNode.pos = trimLeft(objectTypeNode.pos, getPosition(objTypeDescNode.closeBrace()));
        objectTypeNode.pos = trimRight(objectTypeNode.pos, getPosition(objTypeDescNode.openBrace()));
    }
    boolean isAnonymous = checkIfAnonymous(objTypeDescNode);
    objectTypeNode.isAnonymous = isAnonymous;
    if (!isAnonymous) {
        return objectTypeNode;
    }
    // Anonymous object types are hoisted into a named type definition and referenced.
    return deSugarTypeAsUserDefType(objectTypeNode);
}
/**
 * Builds the anonymous class definition backing an object constructor expression from the
 * constructor's member list. The first parameterless init becomes the class init function;
 * type-reference members are rejected with a diagnostic.
 */
public BLangClassDefinition transformObjectCtorExpressionBody(NodeList<Node> members) {
    BLangClassDefinition classDefinition = (BLangClassDefinition) TreeBuilder.createClassDefNode();
    classDefinition.flagSet.add(Flag.ANONYMOUS);
    classDefinition.flagSet.add(Flag.OBJECT_CTOR);
    for (Node node : members) {
        BLangNode bLangNode = node.apply(this);
        NodeKind nodeKind = bLangNode.getKind();
        if (nodeKind == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
            BLangFunction bLangFunction = (BLangFunction) bLangNode;
            bLangFunction.attachedFunction = true;
            bLangFunction.flagSet.add(Flag.ATTACHED);
            if (!Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
                classDefinition.addFunction(bLangFunction);
                continue;
            }
            if (classDefinition.initFunction != null) {
                // A second init is kept as an ordinary function; later phases diagnose it.
                classDefinition.addFunction(bLangFunction);
                continue;
            }
            if (bLangFunction.requiredParams.size() != 0) {
                // Object-ctor init functions must be parameterless.
                dlog.error(bLangFunction.pos, DiagnosticErrorCode.OBJECT_CTOR_INIT_CANNOT_HAVE_PARAMETERS);
                continue;
            }
            bLangFunction.objInitFunction = true;
            classDefinition.initFunction = bLangFunction;
        } else if (nodeKind == NodeKind.VARIABLE) {
            classDefinition.addField((BLangSimpleVariable) bLangNode);
        } else if (nodeKind == NodeKind.USER_DEFINED_TYPE) {
            dlog.error(bLangNode.pos, DiagnosticErrorCode.OBJECT_CTOR_DOES_NOT_SUPPORT_TYPE_REFERENCE_MEMBERS);
        }
    }
    classDefinition.internal = true;
    return classDefinition;
}
/**
 * Transforms an object constructor expression. The constructor is desugared: a class
 * definition is created for the type it defines, that class is added as a top-level node,
 * and the expression initializes a new instance of the class.
 * Example:
 *   var objVariable = object { int n; };
 * becomes
 *   class anonType0 { int n; }
 *   var objVariable = new anonType0();
 *
 * @param objectConstructorExpressionNode object constructor expression node
 * @return the {@code BLangObjectConstructorExpression} wrapping the generated class
 *         definition and the type-init invocation that instantiates it
 */
@Override
public BLangNode transform(ObjectConstructorExpressionNode objectConstructorExpressionNode) {
    Location pos = getPositionWithoutMetadata(objectConstructorExpressionNode);
    // Build the anonymous class from the constructor's members and hoist it to top level.
    BLangClassDefinition anonClass = transformObjectCtorExpressionBody(objectConstructorExpressionNode.members());
    anonClass.pos = pos;
    BLangObjectConstructorExpression objectCtorExpression = TreeBuilder.createObjectCtorExpression();
    objectCtorExpression.pos = pos;
    objectCtorExpression.classNode = anonClass;
    String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
    IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
    anonClass.setName(anonTypeGenName);
    anonClass.flagSet.add(Flag.PUBLIC);
    Optional<TypeDescriptorNode> typeReference = objectConstructorExpressionNode.typeReference();
    typeReference.ifPresent(typeReferenceNode -> {
        objectCtorExpression.addTypeReference(createTypeNode(typeReferenceNode));
    });
    anonClass.annAttachments = applyAll(objectConstructorExpressionNode.annotations());
    addToTop(anonClass);
    // Qualifiers apply to both the generated class and the ctor expression's flags.
    NodeList<Token> objectConstructorQualifierList = objectConstructorExpressionNode.objectTypeQualifiers();
    for (Token qualifier : objectConstructorQualifierList) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.CLIENT_KEYWORD) {
            anonClass.flagSet.add(Flag.CLIENT);
            objectCtorExpression.isClient = true;
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            anonClass.flagSet.add(Flag.ISOLATED);
        } else if (qualifier.kind() == SyntaxKind.SERVICE_KEYWORD) {
            anonClass.flagSet.add(SERVICE);
            objectCtorExpression.isService = true;
        } else {
            throw new RuntimeException("Syntax kind is not supported: " + kind);
        }
    }
    // Build `new <anonClass>()` — a type-init on the generated class with no arguments.
    BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClass.name);
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = pos;
    initNode.userDefinedType = userDefinedType;
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier pkgAlias = createIdentifier(pos, "");
    BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    initNode.argsExpr.addAll(invocationNode.argExprs);
    initNode.initInvocation = invocationNode;
    objectCtorExpression.typeInit = initNode;
    return objectCtorExpression;
}
/**
 * Transforms an object field into a simple variable flagged as FIELD, applying visibility,
 * documentation, and the final/resource qualifiers.
 */
@Override
public BLangNode transform(ObjectFieldNode objFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(objFieldNode.fieldName(), objFieldNode.typeName(),
            objFieldNode.expression().orElse(null), objFieldNode.visibilityQualifier().orElse(null),
            getAnnotations(objFieldNode.metadata()));
    Optional<Node> doc = getDocumentationString(objFieldNode.metadata());
    simpleVar.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
    NodeList<Token> qualifierList = objFieldNode.qualifierList();
    for (Token token : qualifierList) {
        if (token.kind() == SyntaxKind.FINAL_KEYWORD) {
            addFinalQualifier(simpleVar);
        } else if (token.kind() == SyntaxKind.RESOURCE_KEYWORD) {
            addResourceQualifier(simpleVar);
        }
    }
    simpleVar.flagSet.add(Flag.FIELD);
    simpleVar.pos = getPositionWithoutMetadata(objFieldNode);
    return simpleVar;
}
/** Adds the RESOURCE flag to the given variable. */
private void addResourceQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.RESOURCE);
}
/**
 * Transforms an expression function body ({@code => expr}) into a BLangExprFunctionBody.
 */
@Override
public BLangNode transform(ExpressionFunctionBodyNode expressionFunctionBodyNode) {
    BLangExprFunctionBody exprBody = (BLangExprFunctionBody) TreeBuilder.createExprFunctionBodyNode();
    exprBody.pos = getPosition(expressionFunctionBodyNode);
    exprBody.expr = createExpression(expressionFunctionBodyNode.expression());
    return exprBody;
}
/**
 * Transforms a record type descriptor: collects fields (with and without defaults) and type
 * references, resolves the rest descriptor, and decides sealed-ness from the body delimiter.
 * Non-local anonymous records are hoisted into named type definitions.
 */
@Override
public BLangNode transform(RecordTypeDescriptorNode recordTypeDescriptorNode) {
    BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) TreeBuilder.createRecordTypeNode();
    boolean hasRestField = false;
    boolean isAnonymous = checkIfAnonymous(recordTypeDescriptorNode);
    for (Node field : recordTypeDescriptorNode.fields()) {
        if (field.kind() == SyntaxKind.RECORD_FIELD) {
            BLangSimpleVariable bLFiled = (BLangSimpleVariable) field.apply(this);
            Optional<Node> doc = getDocumentationString(((RecordFieldNode) field).metadata());
            bLFiled.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
            recordTypeNode.fields.add(bLFiled);
        } else if (field.kind() == SyntaxKind.RECORD_FIELD_WITH_DEFAULT_VALUE) {
            BLangSimpleVariable bLFiled = (BLangSimpleVariable) field.apply(this);
            Optional<Node> doc = getDocumentationString(((RecordFieldWithDefaultValueNode) field).metadata());
            bLFiled.markdownDocumentationAttachment = createMarkdownDocumentationAttachment(doc);
            recordTypeNode.fields.add(bLFiled);
        } else {
            // Any other member is a type reference (record type inclusion).
            recordTypeNode.addTypeReference(createTypeNode(field));
        }
    }
    Optional<RecordRestDescriptorNode> recordRestDesc = recordTypeDescriptorNode.recordRestDescriptor();
    if (recordRestDesc.isPresent()) {
        recordTypeNode.restFieldType = createTypeNode(recordRestDesc.get());
        hasRestField = true;
    }
    // `{` body → open record; `{|` body with no rest field → sealed.
    boolean isOpen = recordTypeDescriptorNode.bodyStartDelimiter().kind() == SyntaxKind.OPEN_BRACE_TOKEN;
    recordTypeNode.sealed = !(hasRestField || isOpen);
    recordTypeNode.pos = getPosition(recordTypeDescriptorNode);
    recordTypeNode.isAnonymous = isAnonymous;
    recordTypeNode.isLocal = this.isInLocalContext;
    if (!isAnonymous || this.isInLocalContext) {
        return recordTypeNode;
    }
    return createAnonymousRecordType(recordTypeDescriptorNode, recordTypeNode);
}
/**
 * Transforms a singleton type descriptor into a finite type whose value space holds the
 * single literal value.
 * NOTE(review): constructs BLangFiniteTypeNode directly while sibling code uses
 * TreeBuilder.createFiniteTypeNode() — confirm whether this inconsistency matters.
 */
@Override
public BLangNode transform(SingletonTypeDescriptorNode singletonTypeDescriptorNode) {
    BLangFiniteTypeNode bLangFiniteTypeNode = new BLangFiniteTypeNode();
    BLangLiteral simpleLiteral = createSimpleLiteral(singletonTypeDescriptorNode.simpleContExprNode());
    bLangFiniteTypeNode.pos = simpleLiteral.pos;
    bLangFiniteTypeNode.valueSpace.add(simpleLiteral);
    return bLangFiniteTypeNode;
}
/**
 * Transforms a builtin simple name reference (e.g. {@code int}, {@code string}) into the
 * corresponding BLang type node.
 *
 * Fix: the parameter was misleadingly named {@code singletonTypeDescriptorNode}; renamed to
 * match its actual type (parameter names are not part of the method's binary interface).
 */
@Override
public BLangNode transform(BuiltinSimpleNameReferenceNode builtinSimpleNameReferenceNode) {
    return createTypeNode(builtinSimpleNameReferenceNode);
}
@Override
public BLangNode transform(TypeReferenceNode typeReferenceNode) {
    // A type reference (inclusion) contributes the referenced type's node.
    return createTypeNode(typeReferenceNode.typeName());
}
/**
 * Transforms a record field (no default value) into a simple variable. The trailing `?`
 * decides OPTIONAL vs REQUIRED; record fields are implicitly public.
 */
@Override
public BLangNode transform(RecordFieldNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    if (recordFieldNode.questionMarkToken().isPresent()) {
        simpleVar.flagSet.add(Flag.OPTIONAL);
    } else {
        simpleVar.flagSet.add(Flag.REQUIRED);
    }
    simpleVar.flagSet.add(Flag.FIELD);
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}
/**
 * Transforms a record field that carries a default value into a simple variable with the
 * default as its initial expression; record fields are implicitly public.
 */
@Override
public BLangNode transform(RecordFieldWithDefaultValueNode recordFieldNode) {
    BLangSimpleVariable simpleVar = createSimpleVar(recordFieldNode.fieldName(), recordFieldNode.typeName(),
            getAnnotations(recordFieldNode.metadata()));
    simpleVar.flagSet.add(Flag.PUBLIC);
    if (isPresent(recordFieldNode.expression())) {
        simpleVar.setInitialExpression(createExpression(recordFieldNode.expression()));
    }
    addReadOnlyQualifier(recordFieldNode.readonlyKeyword(), simpleVar);
    simpleVar.pos = getPositionWithoutMetadata(recordFieldNode);
    return simpleVar;
}
// Adds the READONLY flag to the variable when the optional 'readonly' keyword
// is present.
private void addReadOnlyQualifier(Optional<Token> readonlyKeyword, BLangSimpleVariable simpleVar) {
    readonlyKeyword.ifPresent(keyword -> simpleVar.flagSet.add(Flag.READONLY));
}
@Override
public BLangNode transform(RecordRestDescriptorNode recordFieldNode) {
    // The rest descriptor contributes only the rest field's type.
    Node restType = recordFieldNode.typeName();
    return createTypeNode(restType);
}
@Override
public BLangNode transform(FunctionDefinitionNode funcDefNode) {
    // Resource methods carry a relative resource path; everything else is built
    // as a plain function node.
    boolean isResource = !funcDefNode.relativeResourcePath().isEmpty();
    BLangFunction bLFunction = isResource
            ? createResourceFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                    funcDefNode.relativeResourcePath(), funcDefNode.functionSignature(),
                    funcDefNode.functionBody())
            : createFunctionNode(funcDefNode.functionName(), funcDefNode.qualifierList(),
                    funcDefNode.functionSignature(), funcDefNode.functionBody());
    bLFunction.annAttachments = applyAll(getAnnotations(funcDefNode.metadata()));
    bLFunction.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(funcDefNode.metadata()));
    bLFunction.pos = getPositionWithoutMetadata(funcDefNode);
    return bLFunction;
}
// Builds a non-resource function node and populates its name, qualifiers,
// signature and body.
private BLangFunction createFunctionNode(IdentifierToken funcName, NodeList<Token> qualifierList,
                                         FunctionSignatureNode functionSignature, FunctionBodyNode functionBody) {
    BLangFunction function = (BLangFunction) TreeBuilder.createFunctionNode();
    BLangIdentifier functionName = createIdentifier(getPosition(funcName), funcName);
    populateFunctionNode(functionName, qualifierList, functionSignature, functionBody, function);
    return function;
}
// Fills in the parts shared by all function nodes: name, qualifier flags,
// signature and (optionally) body.
private void populateFunctionNode(BLangIdentifier name, NodeList<Token> qualifierList,
                                  FunctionSignatureNode functionSignature, FunctionBodyNode functionBody,
                                  BLangFunction bLFunction) {
    bLFunction.name = name;
    setFunctionQualifiers(bLFunction, qualifierList);
    populateFuncSignature(bLFunction, functionSignature);
    // A null body means a declaration without an implementation; such functions
    // are marked as interface functions.
    if (functionBody == null) {
        bLFunction.body = null;
        bLFunction.flagSet.add(Flag.INTERFACE);
        bLFunction.interfaceFunction = true;
    } else {
        bLFunction.body = (BLangFunctionBody) functionBody.apply(this);
        // An 'external' body makes the whole function native.
        if (bLFunction.body.getKind() == NodeKind.EXTERN_FUNCTION_BODY) {
            bLFunction.flagSet.add(Flag.NATIVE);
        }
    }
}
// Maps each recognised qualifier token onto the corresponding function flag.
// Unrecognised qualifier tokens are skipped here.
private void setFunctionQualifiers(BLangFunction bLFunction, NodeList<Token> qualifierList) {
    for (Token qualifier : qualifierList) {
        Flag flag;
        switch (qualifier.kind()) {
            case PUBLIC_KEYWORD:
                flag = Flag.PUBLIC;
                break;
            case PRIVATE_KEYWORD:
                flag = Flag.PRIVATE;
                break;
            case REMOTE_KEYWORD:
                flag = Flag.REMOTE;
                break;
            case TRANSACTIONAL_KEYWORD:
                flag = Flag.TRANSACTIONAL;
                break;
            case RESOURCE_KEYWORD:
                flag = Flag.RESOURCE;
                break;
            case ISOLATED_KEYWORD:
                flag = Flag.ISOLATED;
                break;
            default:
                continue;
        }
        bLFunction.flagSet.add(flag);
    }
}
@Override
public BLangNode transform(ExternalFunctionBodyNode externalFunctionBodyNode) {
    // An 'external' body carries only its annotations; there are no statements.
    BLangExternalFunctionBody body =
            (BLangExternalFunctionBody) TreeBuilder.createExternFunctionBodyNode();
    body.pos = getPosition(externalFunctionBodyNode);
    body.annAttachments = applyAll(externalFunctionBodyNode.annotations());
    return body;
}
@Override
public BLangNode transform(ExplicitAnonymousFunctionExpressionNode anonFuncExprNode) {
    // Desugars an explicit anonymous function into a generated, lifted function
    // plus a lambda expression node that references it.
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location pos = getPosition(anonFuncExprNode);
    // Generated, package-unique name for the lifted function.
    bLFunction.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    populateFuncSignature(bLFunction, anonFuncExprNode.functionSignature());
    bLFunction.body = (BLangFunctionBody) anonFuncExprNode.functionBody().apply(this);
    bLFunction.pos = pos;
    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);
    setFunctionQualifiers(bLFunction, anonFuncExprNode.qualifierList());
    // Lift the generated function to the top level of the compilation unit.
    addToTop(bLFunction);
    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = pos;
    return lambdaExpr;
}
@Override
public BLangNode transform(FunctionBodyBlockNode functionBodyBlockNode) {
    BLangBlockFunctionBody bLFuncBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    // Statements in a function body are in a local context; this flag is read by
    // other transforms (e.g. record type descriptors set isLocal from it).
    this.isInLocalContext = true;
    List<BLangStatement> statements = new ArrayList<>();
    if (functionBodyBlockNode.namedWorkerDeclarator().isPresent()) {
        NamedWorkerDeclarator namedWorkerDeclarator = functionBodyBlockNode.namedWorkerDeclarator().get();
        // Worker init statements precede the worker declarations themselves.
        generateAndAddBLangStatements(namedWorkerDeclarator.workerInitStatements(), statements);
        for (NamedWorkerDeclarationNode workerDeclarationNode : namedWorkerDeclarator.namedWorkerDeclarations()) {
            statements.add((BLangStatement) workerDeclarationNode.apply(this));
            // Transforming a worker declaration pushes extra statements (the worker
            // invocation) onto this stack; drain them immediately, in order.
            while (!this.additionalStatements.empty()) {
                statements.add(additionalStatements.pop());
            }
        }
    }
    generateAndAddBLangStatements(functionBodyBlockNode.statements(), statements);
    bLFuncBody.stmts = statements;
    bLFuncBody.pos = getPosition(functionBodyBlockNode);
    this.isInLocalContext = false;
    return bLFuncBody;
}
@Override
public BLangNode transform(ForEachStatementNode forEachStatementNode) {
    // foreach <typed-binding> in <collection> { body } [on fail ...]
    BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
    foreach.pos = getPosition(forEachStatementNode);
    TypedBindingPatternNode bindingPattern = forEachStatementNode.typedBindingPattern();
    VariableDefinitionNode varDef = createBLangVarDef(getPosition(bindingPattern),
            bindingPattern, Optional.empty(), Optional.empty());
    foreach.setVariableDefinitionNode(varDef);
    foreach.isDeclaredWithVar = bindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    BLangBlockStmt body = (BLangBlockStmt) forEachStatementNode.blockStatement().apply(this);
    body.pos = getPosition(forEachStatementNode.blockStatement());
    foreach.setBody(body);
    foreach.setCollection(createExpression(forEachStatementNode.actionOrExpressionNode()));
    forEachStatementNode.onFailClause().ifPresent(onFailClauseNode -> foreach.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) onFailClauseNode.apply(this)));
    return foreach;
}
@Override
public BLangNode transform(ForkStatementNode forkStatementNode) {
    // Builds the bare fork-join node; only its source position is recorded here.
    BLangForkJoin forkJoin = (BLangForkJoin) TreeBuilder.createForkJoinNode();
    forkJoin.pos = getPosition(forkStatementNode);
    return forkJoin;
}
@Override
public BLangNode transform(NamedWorkerDeclarationNode namedWorkerDeclNode) {
    // Desugars 'worker w { ... }' into:
    //   1. a generated, lifted function holding the worker body,
    //   2. a variable definition binding a lambda of that function (returned), and
    //   3. an async invocation of the lambda pushed onto additionalStatements for
    //      transform(FunctionBodyBlockNode) to splice in right after (2).
    BLangFunction bLFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    Location workerBodyPos = getPosition(namedWorkerDeclNode.workerBody());
    // Generated, package-unique name for the lifted worker function.
    bLFunction.name = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    // Reuse the worker body's statements as the generated function's body.
    BLangBlockStmt blockStmt = (BLangBlockStmt) namedWorkerDeclNode.workerBody().apply(this);
    BLangBlockFunctionBody bodyNode = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    bodyNode.stmts = blockStmt.stmts;
    bodyNode.pos = workerBodyPos;
    bLFunction.body = bodyNode;
    bLFunction.internal = true;
    bLFunction.pos = workerBodyPos;
    bLFunction.addFlag(Flag.LAMBDA);
    bLFunction.addFlag(Flag.ANONYMOUS);
    bLFunction.addFlag(Flag.WORKER);
    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        bLFunction.addFlag(Flag.TRANSACTIONAL);
    }
    // Resolve the worker name: generate one when the token is missing, and
    // unescape quoted identifiers (leading ').
    String workerName;
    if (namedWorkerDeclNode.workerName().isMissing()) {
        workerName = missingNodesHelper.getNextMissingNodeName(packageID);
    } else {
        workerName = namedWorkerDeclNode.workerName().text();
    }
    if (workerName.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
        bLFunction.defaultWorkerName.originalValue = workerName;
        workerName = IdentifierUtils.unescapeUnicodeCodepoints(workerName.substring(1));
    }
    bLFunction.defaultWorkerName.value = workerName;
    bLFunction.defaultWorkerName.pos = getPosition(namedWorkerDeclNode.workerName());
    NodeList<AnnotationNode> annotations = namedWorkerDeclNode.annotations();
    bLFunction.annAttachments = applyAll(annotations);
    // The worker's return type defaults to nil when none is declared.
    Optional<Node> retNode = namedWorkerDeclNode.returnTypeDesc();
    if (retNode.isPresent()) {
        ReturnTypeDescriptorNode returnType = (ReturnTypeDescriptorNode) retNode.get();
        bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
    } else {
        BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
        bLValueType.pos = getPosition(namedWorkerDeclNode);
        bLValueType.typeKind = TypeKind.NIL;
        bLFunction.setReturnTypeNode(bLValueType);
    }
    addToTop(bLFunction);
    BLangLambdaFunction lambdaExpr = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaExpr.function = bLFunction;
    lambdaExpr.pos = workerBodyPos;
    lambdaExpr.internal = true;
    // Variable definition binding the lambda under a prefixed internal name.
    String workerLambdaName = WORKER_LAMBDA_VAR_PREFIX + workerName;
    Location workerNamePos = getPosition(namedWorkerDeclNode.workerName());
    BLangSimpleVariable var = new SimpleVarBuilder()
            .with(workerLambdaName, workerNamePos)
            .setExpression(lambdaExpr)
            .isDeclaredWithVar()
            .isFinal()
            .build();
    if (namedWorkerDeclNode.transactionalKeyword().isPresent()) {
        var.addFlag(Flag.TRANSACTIONAL);
    }
    BLangSimpleVariableDef lamdaWrkr = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    lamdaWrkr.pos = workerBodyPos;
    var.pos = workerBodyPos;
    lamdaWrkr.setVariable(var);
    lamdaWrkr.isWorker = true;
    lamdaWrkr.internal = var.internal = true;
    // Workers declared directly inside a fork statement are flagged as forked.
    if (namedWorkerDeclNode.parent().kind() == SyntaxKind.FORK_STATEMENT) {
        lamdaWrkr.isInFork = true;
        lamdaWrkr.var.flagSet.add(Flag.FORKED);
    }
    // Async invocation of the lambda variable, bound to the worker's own name.
    BLangInvocation bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
    BLangIdentifier nameInd = this.createIdentifier(workerNamePos, workerLambdaName);
    BLangNameReference reference = new BLangNameReference(workerNamePos, null, TreeBuilder.createIdentifierNode(),
            nameInd);
    bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
    bLInvocation.name = (BLangIdentifier) reference.name;
    bLInvocation.pos = workerNamePos;
    bLInvocation.flagSet = new HashSet<>();
    bLInvocation.annAttachments = bLFunction.annAttachments;
    if (bLInvocation.getKind() == NodeKind.INVOCATION) {
        bLInvocation.async = true;
    } else {
        dlog.error(workerBodyPos, DiagnosticErrorCode.START_REQUIRE_INVOCATION);
    }
    BLangSimpleVariable invoc = new SimpleVarBuilder()
            .with(workerName, workerNamePos)
            .isDeclaredWithVar()
            .isWorkerVar()
            .setExpression(bLInvocation)
            .isFinal()
            .setPos(workerNamePos)
            .build();
    BLangSimpleVariableDef workerInvoc = (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
    workerInvoc.pos = workerNamePos;
    workerInvoc.setVariable(invoc);
    workerInvoc.isWorker = true;
    invoc.flagSet.add(Flag.WORKER);
    // Consumed by transform(FunctionBodyBlockNode), which drains this stack right
    // after the returned lambda definition.
    this.additionalStatements.push(workerInvoc);
    return lamdaWrkr;
}
/**
 * Transforms every node in {@code annotations} and collects the results.
 * Returns an empty list for a {@code null} input.
 *
 * <p>The cast to {@code A} is inherently unchecked because {@code apply} is
 * declared to return the base node type; callers are responsible for supplying
 * a list whose transform results actually are {@code A}.
 */
private <A extends BLangNode, B extends Node> List<A> applyAll(NodeList<B> annotations) {
    List<A> annAttachments = new ArrayList<>();
    if (annotations == null) {
        return annAttachments;
    }
    for (B annotation : annotations) {
        @SuppressWarnings("unchecked")
        A blNode = (A) annotation.apply(this);
        annAttachments.add(blNode);
    }
    return annAttachments;
}
@Override
public BLangNode transform(AnnotationNode annotation) {
    // Builds an annotation attachment from '@name' or '@name { ... }'; the
    // mapping-constructor value is optional.
    BLangAnnotationAttachment attachment =
            (BLangAnnotationAttachment) TreeBuilder.createAnnotAttachmentNode();
    annotation.annotValue().ifPresent(mapping ->
            attachment.setExpression((BLangExpression) mapping.apply(this)));
    BLangNameReference nameReference = createBLangNameReference(annotation.annotReference());
    attachment.setAnnotationName(nameReference.name);
    attachment.setPackageAlias(nameReference.pkgAlias);
    attachment.pos = getPosition(annotation);
    return attachment;
}
@Override
public BLangNode transform(QueryActionNode queryActionNode) {
    // Builds 'from ... do { ... }' as a query action whose clause list ends with
    // the do clause.
    BLangQueryAction bLQueryAction = (BLangQueryAction) TreeBuilder.createQueryActionNode();
    BLangDoClause doClause = (BLangDoClause) TreeBuilder.createDoClauseNode();
    doClause.body = (BLangBlockStmt) queryActionNode.blockStatement().apply(this);
    // Extend the body position leftwards so it covers the 'do' keyword too.
    doClause.body.pos = expandLeft(doClause.body.pos, getPosition(queryActionNode.doKeyword()));
    doClause.pos = doClause.body.pos;
    bLQueryAction.queryClauseList.add(queryActionNode.queryPipeline().fromClause().apply(this));
    bLQueryAction.queryClauseList.addAll(applyAll(queryActionNode.queryPipeline().intermediateClauses()));
    bLQueryAction.queryClauseList.add(doClause);
    bLQueryAction.doClause = doClause;
    bLQueryAction.pos = getPosition(queryActionNode);
    return bLQueryAction;
}
@Override
public BLangNode transform(AnnotationDeclarationNode annotationDeclarationNode) {
    // Builds an annotation declaration: flags, optional type descriptor and the
    // list of attach points.
    BLangAnnotation annotationDecl = (BLangAnnotation) TreeBuilder.createAnnotationNode();
    Location pos = getPositionWithoutMetadata(annotationDeclarationNode);
    annotationDecl.pos = pos;
    annotationDecl.name = createIdentifier(annotationDeclarationNode.annotationTag());
    if (annotationDeclarationNode.visibilityQualifier().isPresent()) {
        annotationDecl.addFlag(Flag.PUBLIC);
    }
    if (annotationDeclarationNode.constKeyword().isPresent()) {
        annotationDecl.addFlag(Flag.CONSTANT);
    }
    annotationDecl.annAttachments = applyAll(getAnnotations(annotationDeclarationNode.metadata()));
    annotationDecl.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(annotationDeclarationNode.metadata()));
    Optional<Node> typedesc = annotationDeclarationNode.typeDescriptor();
    if (typedesc.isPresent()) {
        annotationDecl.typeNode = createTypeNode(typedesc.get());
    }
    SeparatedNodeList<Node> paramList = annotationDeclarationNode.attachPoints();
    // An attach point may consist of several tokens (e.g. 'object function');
    // dispatch on the first token's kind.
    for (Node child : paramList) {
        AnnotationAttachPointNode attachPoint = (AnnotationAttachPointNode) child;
        boolean source = attachPoint.sourceKeyword().isPresent();
        AttachPoint bLAttachPoint;
        NodeList<Token> idents = attachPoint.identifiers();
        Token firstIndent = idents.get(0);
        switch (firstIndent.kind()) {
            case OBJECT_KEYWORD:
                // 'object function' -> object method; 'object field' -> object field.
                Token secondIndent = idents.get(1);
                switch (secondIndent.kind()) {
                    case FUNCTION_KEYWORD:
                        bLAttachPoint =
                                AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_METHOD.getValue(), source);
                        break;
                    case FIELD_KEYWORD:
                        bLAttachPoint =
                                AttachPoint.getAttachmentPoint(AttachPoint.Point.OBJECT_FIELD.getValue(), source);
                        break;
                    default:
                        throw new RuntimeException("Syntax kind is not supported: " + secondIndent.kind());
                }
                break;
            case SERVICE_KEYWORD:
                // 'service' alone, or the three-token service-remote form.
                String value;
                if (idents.size() == 1) {
                    value = AttachPoint.Point.SERVICE.getValue();
                } else if (idents.size() == 3) {
                    value = AttachPoint.Point.SERVICE_REMOTE.getValue();
                } else {
                    throw new RuntimeException("Invalid annotation attach point");
                }
                bLAttachPoint = AttachPoint.getAttachmentPoint(value, source);
                break;
            case RECORD_KEYWORD:
                bLAttachPoint = AttachPoint.getAttachmentPoint(AttachPoint.Point.RECORD_FIELD.getValue(), source);
                break;
            default:
                // Single-token attach points map by their literal text.
                bLAttachPoint = AttachPoint.getAttachmentPoint(firstIndent.text(), source);
        }
        annotationDecl.addAttachPoint(bLAttachPoint);
    }
    return annotationDecl;
}
@Override
public BLangNode transform(AnnotAccessExpressionNode annotAccessExpressionNode) {
    // Splits the annotation tag reference into package alias + annotation name.
    BLangAnnotAccessExpr annotAccessExpr = (BLangAnnotAccessExpr) TreeBuilder.createAnnotAccessExpressionNode();
    Node tagReference = annotAccessExpressionNode.annotTagReference();
    if (tagReference.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // Unqualified reference: empty package alias.
        annotAccessExpr.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        annotAccessExpr.annotationName = createIdentifier(((SimpleNameReferenceNode) tagReference).name());
    } else {
        QualifiedNameReferenceNode qualifiedName = (QualifiedNameReferenceNode) tagReference;
        annotAccessExpr.pkgAlias = createIdentifier(qualifiedName.modulePrefix());
        annotAccessExpr.annotationName = createIdentifier(qualifiedName.identifier());
    }
    annotAccessExpr.pos = getPosition(annotAccessExpressionNode);
    annotAccessExpr.expr = createExpression(annotAccessExpressionNode.expression());
    return annotAccessExpr;
}
@Override
public BLangNode transform(ConditionalExpressionNode conditionalExpressionNode) {
    BLangTernaryExpr ternaryExpr = (BLangTernaryExpr) TreeBuilder.createTernaryExpressionNode();
    ternaryExpr.pos = getPosition(conditionalExpressionNode);
    ternaryExpr.elseExpr = createExpression(conditionalExpressionNode.endExpression());
    ternaryExpr.thenExpr = createExpression(conditionalExpressionNode.middleExpression());
    ternaryExpr.expr = createExpression(conditionalExpressionNode.lhsExpression());
    // If the condition itself is a ternary, the expression was parsed with the
    // wrong nesting. Re-associate: walk to the innermost elseExpr of the
    // condition chain, make it this ternary's condition, splice this ternary in
    // as the new innermost elseExpr, and return the outermost ternary.
    if (ternaryExpr.expr.getKind() == NodeKind.TERNARY_EXPR) {
        BLangTernaryExpr root = (BLangTernaryExpr) ternaryExpr.expr;
        BLangTernaryExpr parent = root;
        while (parent.elseExpr.getKind() == NodeKind.TERNARY_EXPR) {
            parent = (BLangTernaryExpr) parent.elseExpr;
        }
        ternaryExpr.expr = parent.elseExpr;
        parent.elseExpr = ternaryExpr;
        ternaryExpr = root;
    }
    return ternaryExpr;
}
@Override
public BLangNode transform(CheckExpressionNode checkExpressionNode) {
    // 'check expr' and 'checkpanic expr' share a shape; only the keyword differs.
    Location pos = getPosition(checkExpressionNode);
    BLangExpression expr = createExpression(checkExpressionNode.expression());
    if (checkExpressionNode.checkKeyword().kind() != SyntaxKind.CHECK_KEYWORD) {
        return createCheckPanickedExpr(pos, expr);
    }
    return createCheckExpr(pos, expr);
}
@Override
public BLangNode transform(TypeTestExpressionNode typeTestExpressionNode) {
    // 'expr is T' — the tested expression plus the type to test against.
    BLangTypeTestExpr typeTestExpr = (BLangTypeTestExpr) TreeBuilder.createTypeTestExpressionNode();
    typeTestExpr.pos = getPosition(typeTestExpressionNode);
    typeTestExpr.expr = createExpression(typeTestExpressionNode.expression());
    typeTestExpr.typeNode = createTypeNode(typeTestExpressionNode.typeDescriptor());
    return typeTestExpr;
}
@Override
public BLangNode transform(MappingConstructorExpressionNode mapConstruct) {
    // Builds a record literal from '{ ... }': spread fields, computed-name fields
    // and specific fields (key-value pairs or var-name shorthand).
    BLangRecordLiteral bLiteralNode = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
    for (MappingFieldNode field : mapConstruct.fields()) {
        if (field.kind() == SyntaxKind.SPREAD_FIELD) {
            // '...expr' spreads another mapping's fields into this literal.
            SpreadFieldNode spreadFieldNode = (SpreadFieldNode) field;
            BLangRecordSpreadOperatorField bLRecordSpreadOpField =
                    (BLangRecordSpreadOperatorField) TreeBuilder.createRecordSpreadOperatorField();
            bLRecordSpreadOpField.expr = createExpression(spreadFieldNode.valueExpr());
            bLRecordSpreadOpField.pos = getPosition(spreadFieldNode);
            bLiteralNode.fields.add(bLRecordSpreadOpField);
        } else if (field.kind() == SyntaxKind.COMPUTED_NAME_FIELD) {
            // '[keyExpr]: valueExpr' — the key is computed at runtime.
            ComputedNameFieldNode computedNameField = (ComputedNameFieldNode) field;
            BLangRecordKeyValueField bLRecordKeyValueField =
                    (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
            // Fix: this branch previously left pos unset, unlike the spread and
            // specific-field branches.
            bLRecordKeyValueField.pos = getPosition(computedNameField);
            bLRecordKeyValueField.valueExpr = createExpression(computedNameField.valueExpr());
            bLRecordKeyValueField.key =
                    new BLangRecordLiteral.BLangRecordKey(createExpression(computedNameField.fieldNameExpr()));
            bLRecordKeyValueField.key.computedKey = true;
            bLiteralNode.fields.add(bLRecordKeyValueField);
        } else {
            SpecificFieldNode specificField = (SpecificFieldNode) field;
            io.ballerina.compiler.syntax.tree.ExpressionNode valueExpr = specificField.valueExpr().orElse(null);
            if (valueExpr == null) {
                // Var-name shorthand '{x}': the field name doubles as a variable
                // reference supplying the value.
                BLangRecordLiteral.BLangRecordVarNameField fieldVar =
                        (BLangRecordLiteral.BLangRecordVarNameField) TreeBuilder.createRecordVarRefNameFieldNode();
                fieldVar.variableName = createIdentifier((Token) specificField.fieldName());
                fieldVar.pkgAlias = createIdentifier(null, "");
                fieldVar.pos = fieldVar.variableName.pos;
                fieldVar.readonly = specificField.readonlyKeyword().isPresent();
                bLiteralNode.fields.add(fieldVar);
            } else {
                // Ordinary 'key: value' field.
                BLangRecordKeyValueField bLRecordKeyValueField =
                        (BLangRecordKeyValueField) TreeBuilder.createRecordKeyValue();
                bLRecordKeyValueField.pos = getPosition(specificField);
                bLRecordKeyValueField.readonly = specificField.readonlyKeyword().isPresent();
                bLRecordKeyValueField.valueExpr = createExpression(valueExpr);
                bLRecordKeyValueField.key =
                        new BLangRecordLiteral.BLangRecordKey(createExpression(specificField.fieldName()));
                bLRecordKeyValueField.key.computedKey = false;
                bLRecordKeyValueField.key.pos = getPosition(specificField.fieldName());
                bLiteralNode.fields.add(bLRecordKeyValueField);
            }
        }
    }
    bLiteralNode.pos = getPosition(mapConstruct);
    return bLiteralNode;
}
@Override
public BLangNode transform(ListConstructorExpressionNode listConstructorExprNode) {
    // Collects the member expressions of '[e1, e2, ...]' into a list constructor.
    BLangListConstructorExpr listConstructorExpr =
            (BLangListConstructorExpr) TreeBuilder.createListConstructorExpressionNode();
    List<BLangExpression> memberExprs = new ArrayList<>();
    for (Node expr : listConstructorExprNode.expressions()) {
        memberExprs.add(createExpression(expr));
    }
    listConstructorExpr.exprs = memberExprs;
    listConstructorExpr.pos = getPosition(listConstructorExprNode);
    return listConstructorExpr;
}
@Override
public BLangNode transform(UnaryExpressionNode unaryExprNode) {
    // '+lit' / '-lit' applied to a numeric literal folds into a signed simple
    // literal instead of a unary expression.
    SyntaxKind operatorKind = unaryExprNode.unaryOperator().kind();
    boolean signedNumericLiteral = unaryExprNode.expression().kind() == SyntaxKind.NUMERIC_LITERAL
            && (operatorKind == SyntaxKind.MINUS_TOKEN || operatorKind == SyntaxKind.PLUS_TOKEN);
    if (signedNumericLiteral) {
        return createSimpleLiteral(unaryExprNode);
    }
    OperatorKind operator = OperatorKind.valueFrom(unaryExprNode.unaryOperator().text());
    return createBLangUnaryExpr(getPosition(unaryExprNode), operator,
            createExpression(unaryExprNode.expression()));
}
@Override
public BLangNode transform(TypeofExpressionNode typeofExpressionNode) {
    // 'typeof expr' is modeled as a unary expression over the operand.
    OperatorKind operator = OperatorKind.valueFrom(typeofExpressionNode.typeofKeyword().text());
    return createBLangUnaryExpr(getPosition(typeofExpressionNode), operator,
            createExpression(typeofExpressionNode.expression()));
}
@Override
public BLangNode transform(BinaryExpressionNode binaryExprNode) {
    // The elvis operator ('?:') gets a dedicated node type; every other operator
    // becomes an ordinary binary expression.
    if (binaryExprNode.operator().kind() == SyntaxKind.ELVIS_TOKEN) {
        BLangElvisExpr elvis = (BLangElvisExpr) TreeBuilder.createElvisExpressionNode();
        elvis.pos = getPosition(binaryExprNode);
        elvis.lhsExpr = createExpression(binaryExprNode.lhsExpr());
        elvis.rhsExpr = createExpression(binaryExprNode.rhsExpr());
        return elvis;
    }
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = getPosition(binaryExprNode);
    binaryExpr.lhsExpr = createExpression(binaryExprNode.lhsExpr());
    binaryExpr.rhsExpr = createExpression(binaryExprNode.rhsExpr());
    binaryExpr.opKind = OperatorKind.valueFrom(binaryExprNode.operator().text());
    return binaryExpr;
}
@Override
public BLangNode transform(FieldAccessExpressionNode fieldAccessExprNode) {
    // Builds 'expr.field', or 'expr.ns:field' for namespace-prefixed access.
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = fieldAccessExprNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // ns:field — use the prefixed-access node variant that records the prefix.
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess)
                        TreeBuilder.createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field =
                createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    // A braced container '(expr).field' is unwrapped so the access hangs
    // directly off the inner expression.
    io.ballerina.compiler.syntax.tree.ExpressionNode containerExpr = fieldAccessExprNode.expression();
    if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
        bLFieldBasedAccess.expr = createExpression(((BracedExpressionNode) containerExpr).expression());
    } else {
        bLFieldBasedAccess.expr = createExpression(containerExpr);
    }
    bLFieldBasedAccess.pos = getPosition(fieldAccessExprNode);
    bLFieldBasedAccess.field.pos = getPosition(fieldAccessExprNode.fieldName());
    bLFieldBasedAccess.optionalFieldAccess = false;
    return bLFieldBasedAccess;
}
@Override
public BLangNode transform(OptionalFieldAccessExpressionNode optionalFieldAccessExpressionNode) {
    // Builds 'expr?.field'; same shape as plain field access but with the
    // optional-access flag set.
    BLangFieldBasedAccess bLFieldBasedAccess;
    Node fieldName = optionalFieldAccessExpressionNode.fieldName();
    if (fieldName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // ns:field — use the prefixed-access node variant that records the prefix.
        QualifiedNameReferenceNode qualifiedFieldName = (QualifiedNameReferenceNode) fieldName;
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess accessWithPrefixNode =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) TreeBuilder
                        .createFieldBasedAccessWithPrefixNode();
        accessWithPrefixNode.nsPrefix = createIdentifier(qualifiedFieldName.modulePrefix());
        accessWithPrefixNode.field = createIdentifier(qualifiedFieldName.identifier());
        bLFieldBasedAccess = accessWithPrefixNode;
        bLFieldBasedAccess.fieldKind = FieldKind.WITH_NS;
    } else {
        bLFieldBasedAccess = (BLangFieldBasedAccess) TreeBuilder.createFieldBasedAccessNode();
        bLFieldBasedAccess.field = createIdentifier(((SimpleNameReferenceNode) fieldName).name());
        bLFieldBasedAccess.fieldKind = FieldKind.SINGLE;
    }
    bLFieldBasedAccess.pos = getPosition(optionalFieldAccessExpressionNode);
    bLFieldBasedAccess.field.pos = getPosition(optionalFieldAccessExpressionNode.fieldName());
    bLFieldBasedAccess.expr = createExpression(optionalFieldAccessExpressionNode.expression());
    bLFieldBasedAccess.optionalFieldAccess = true;
    return bLFieldBasedAccess;
}
@Override
public BLangNode transform(BracedExpressionNode brcExprOut) {
    // The braced wrapper itself contributes no node; transform the inner expression.
    return createExpression(brcExprOut.expression());
}
@Override
public BLangNode transform(FunctionCallExpressionNode functionCallNode) {
    // A call that is the direct child of a 'start' action becomes async.
    boolean async = isFunctionCallAsync(functionCallNode);
    return createBLangInvocation(functionCallNode.functionName(), functionCallNode.arguments(),
            getPosition(functionCallNode), async);
}
@Override
public BLangNode transform(ErrorConstructorExpressionNode errorConstructorExprNode) {
    // Builds 'error(...)' / 'error ErrorType(...)' with its positional and named
    // arguments; other argument kinds are ignored here.
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    errorConstructorExpr.pos = getPosition(errorConstructorExprNode);
    errorConstructorExprNode.typeReference().ifPresent(typeRef ->
            errorConstructorExpr.errorTypeRef = (BLangUserDefinedType) createTypeNode(typeRef));
    errorConstructorExpr.positionalArgs = new ArrayList<>();
    errorConstructorExpr.namedArgs = new ArrayList<>();
    for (Node argNode : errorConstructorExprNode.arguments()) {
        switch (argNode.kind()) {
            case POSITIONAL_ARG:
                errorConstructorExpr.positionalArgs.add(
                        (BLangExpression) transform((PositionalArgumentNode) argNode));
                break;
            case NAMED_ARG:
                errorConstructorExpr.namedArgs.add(
                        (BLangNamedArgsExpression) transform((NamedArgumentNode) argNode));
                break;
            default:
                break;
        }
    }
    return errorConstructorExpr;
}
/**
 * Transforms a method call expression ({@code expr.method(args)}) into an
 * invocation whose attached expression is the receiver.
 */
@Override
public BLangNode transform(MethodCallExpressionNode methodCallExprNode) {
    BLangInvocation bLInvocation = createBLangInvocation(methodCallExprNode.methodName(),
                                                         methodCallExprNode.arguments(),
                                                         getPosition(methodCallExprNode), false);
    bLInvocation.expr = createExpression(methodCallExprNode.expression());
    return bLInvocation;
}
@Override
public BLangNode transform(ImplicitNewExpressionNode implicitNewExprNode) {
    // 'new(...)' without a type descriptor — the init invocation carries the args.
    BLangTypeInit initNode = createTypeInit(implicitNewExprNode);
    BLangInvocation initInvocation = createInvocation(implicitNewExprNode, implicitNewExprNode.newKeyword());
    initNode.initInvocation = initInvocation;
    initNode.argsExpr.addAll(initInvocation.argExprs);
    return initNode;
}
@Override
public BLangNode transform(ExplicitNewExpressionNode explicitNewExprNode) {
    // 'new T(...)' — same as implicit new, but createTypeInit also records T.
    BLangTypeInit initNode = createTypeInit(explicitNewExprNode);
    BLangInvocation initInvocation = createInvocation(explicitNewExprNode, explicitNewExprNode.newKeyword());
    initNode.initInvocation = initInvocation;
    initNode.argsExpr.addAll(initInvocation.argExprs);
    return initNode;
}
// A function call is async exactly when its immediate parent is a 'start' action.
private boolean isFunctionCallAsync(FunctionCallExpressionNode functionCallExpressionNode) {
    SyntaxKind parentKind = functionCallExpressionNode.parent().kind();
    return parentKind == SyntaxKind.START_ACTION;
}
// Builds the type-init node for a 'new' expression. Only an explicit
// 'new T(...)' carries a user-defined type; implicit 'new' leaves it unset.
private BLangTypeInit createTypeInit(NewExpressionNode expression) {
    BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
    initNode.pos = getPosition(expression);
    if (expression.kind() != SyntaxKind.EXPLICIT_NEW_EXPRESSION) {
        return initNode;
    }
    initNode.userDefinedType = createTypeNode(((ExplicitNewExpressionNode) expression).typeDescriptor());
    return initNode;
}
// Builds the init invocation for a 'new' expression; the 'new' keyword itself
// serves as the invocation name.
private BLangInvocation createInvocation(NewExpressionNode expression, Token newKeyword) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangNameReference nameReference = createBLangNameReference(newKeyword);
    invocationNode.name = (BLangIdentifier) nameReference.name;
    invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    invocationNode.pos = getPosition(expression);
    populateArgsInvocation(expression, invocationNode);
    return invocationNode;
}
// Copies the 'new' expression's arguments into the invocation. An implicit
// 'new' may omit the argument list entirely, yielding a null iterator.
private void populateArgsInvocation(NewExpressionNode expression, BLangInvocation invocationNode) {
    Iterator<FunctionArgumentNode> argumentsIter = getArgumentNodesIterator(expression);
    if (argumentsIter == null) {
        return;
    }
    while (argumentsIter.hasNext()) {
        invocationNode.argExprs.add(createExpression(argumentsIter.next()));
    }
}
// Returns an iterator over the 'new' expression's arguments, or null when an
// implicit 'new' has no parenthesized argument list at all.
private Iterator<FunctionArgumentNode> getArgumentNodesIterator(NewExpressionNode expression) {
    if (expression.kind() != SyntaxKind.IMPLICIT_NEW_EXPRESSION) {
        // Explicit 'new T(...)' always carries an argument list.
        ParenthesizedArgList argList =
                (ParenthesizedArgList) ((ExplicitNewExpressionNode) expression).parenthesizedArgList();
        return argList.arguments().iterator();
    }
    return ((ImplicitNewExpressionNode) expression).parenthesizedArgList()
            .map(argList -> argList.arguments().iterator())
            .orElse(null);
}
@Override
public BLangNode transform(IndexedExpressionNode indexedExpressionNode) {
    BLangIndexBasedAccess indexBasedAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    indexBasedAccess.pos = getPosition(indexedExpressionNode);
    SeparatedNodeList<io.ballerina.compiler.syntax.tree.ExpressionNode> keys =
            indexedExpressionNode.keyExpression();
    if (keys.size() == 1) {
        indexBasedAccess.indexExpr = createExpression(indexedExpressionNode.keyExpression().get(0));
    } else {
        // Several keys ('t[k1, k2]') form a table multi-key index expression
        // spanning from the first to the last key.
        BLangTableMultiKeyExpr multiKeyExpr =
                (BLangTableMultiKeyExpr) TreeBuilder.createTableMultiKeyExpressionNode();
        multiKeyExpr.pos = getPosition(keys.get(0), keys.get(keys.size() - 1));
        List<BLangExpression> multiKeyIndexExprs = new ArrayList<>();
        for (io.ballerina.compiler.syntax.tree.ExpressionNode keyExpr : keys) {
            multiKeyIndexExprs.add(createExpression(keyExpr));
        }
        multiKeyExpr.multiKeyIndexExprs = multiKeyIndexExprs;
        indexBasedAccess.indexExpr = multiKeyExpr;
    }
    Node containerExpr = indexedExpressionNode.containerExpression();
    BLangExpression expression = createExpression(containerExpr);
    if (containerExpr.kind() == SyntaxKind.BRACED_EXPRESSION) {
        // '(expr)[i]' — keep the grouping but move the index access inside it.
        indexBasedAccess.expr = ((BLangGroupExpr) expression).expression;
        BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        group.expression = indexBasedAccess;
        group.pos = getPosition(indexedExpressionNode);
        return group;
    } else if (containerExpr.kind() == SyntaxKind.XML_STEP_EXPRESSION) {
        // XML step container — the index becomes the navigation's child index
        // rather than a separate access node.
        ((BLangXMLNavigationAccess) expression).childIndex = indexBasedAccess.indexExpr;
        return expression;
    }
    indexBasedAccess.expr = expression;
    return indexBasedAccess;
}
/**
 * Transforms a type-cast expression (e.g. {@code <T> expr} or {@code <@ann T> expr})
 * into a BLang type-conversion node.
 *
 * Bug fix: the original null-checked {@code typeCastParamNode} before reading its
 * type, but then dereferenced it unconditionally for {@code annotations()},
 * which would NPE whenever the cast param is absent. Both uses are now guarded.
 */
@Override
public BLangTypeConversionExpr transform(TypeCastExpressionNode typeCastExpressionNode) {
    BLangTypeConversionExpr typeConversionNode = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    typeConversionNode.pos = getPosition(typeCastExpressionNode);
    TypeCastParamNode typeCastParamNode = typeCastExpressionNode.typeCastParam();
    if (typeCastParamNode != null && typeCastParamNode.type().isPresent()) {
        typeConversionNode.typeNode = createTypeNode(typeCastParamNode.type().get());
    }
    typeConversionNode.expr = createExpression(typeCastExpressionNode.expression());
    if (typeCastParamNode != null) {
        typeConversionNode.annAttachments = applyAll(typeCastParamNode.annotations());
    }
    return typeConversionNode;
}
/**
 * Transforms a bare token that can appear as an expression fragment
 * (XML text, template string content, or a closing brace token) into a
 * simple literal; any other token kind is a transformer bug.
 */
@Override
public BLangNode transform(Token token) {
    SyntaxKind tokenKind = token.kind();
    boolean isLiteralToken = tokenKind == SyntaxKind.XML_TEXT_CONTENT
            || tokenKind == SyntaxKind.TEMPLATE_STRING
            || tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
    if (isLiteralToken) {
        return createSimpleLiteral(token);
    }
    throw new RuntimeException("Syntax kind is not supported: " + tokenKind);
}
/** An interpolation `${expr}` is simply its inner expression. */
@Override
public BLangNode transform(InterpolationNode interpolationNode) {
    io.ballerina.compiler.syntax.tree.ExpressionNode innerExpr = interpolationNode.expression();
    return createExpression(innerExpr);
}
/**
 * Dispatches template expressions (xml, string, raw) to the matching
 * literal builder based on the node's syntax kind.
 */
@Override
public BLangNode transform(TemplateExpressionNode expressionNode) {
    SyntaxKind templateKind = expressionNode.kind();
    if (templateKind == SyntaxKind.XML_TEMPLATE_EXPRESSION) {
        return createXmlTemplateLiteral(expressionNode);
    }
    if (templateKind == SyntaxKind.STRING_TEMPLATE_EXPRESSION) {
        return createStringTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
    }
    if (templateKind == SyntaxKind.RAW_TEMPLATE_EXPRESSION) {
        return createRawTemplateLiteral(expressionNode.content(), getPosition(expressionNode));
    }
    throw new RuntimeException("Syntax kind is not supported: " + templateKind);
}
/**
 * Transforms a table constructor expression, collecting each row as a record
 * literal and attaching the optional key specifier.
 *
 * Idiom fix: the original called {@code keySpecifier().orElse(null)} inside an
 * {@code isPresent()} guard — the {@code orElse(null)} can never trigger and
 * obscures the intent; {@code get()} states it directly.
 */
@Override
public BLangNode transform(TableConstructorExpressionNode tableConstructorExpressionNode) {
    BLangTableConstructorExpr tableConstructorExpr =
            (BLangTableConstructorExpr) TreeBuilder.createTableConstructorExpressionNode();
    tableConstructorExpr.pos = getPosition(tableConstructorExpressionNode);
    for (Node row : tableConstructorExpressionNode.rows()) {
        tableConstructorExpr.addRecordLiteral((BLangRecordLiteral) row.apply(this));
    }
    if (tableConstructorExpressionNode.keySpecifier().isPresent()) {
        tableConstructorExpr.tableKeySpecifier =
                (BLangTableKeySpecifier) tableConstructorExpressionNode.keySpecifier().get().apply(this);
    }
    return tableConstructorExpr;
}
/** Builds a `trap expr` node wrapping the trapped expression. */
@Override
public BLangNode transform(TrapExpressionNode trapExpressionNode) {
    BLangTrapExpr trappedExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trappedExpr.pos = getPosition(trapExpressionNode);
    trappedExpr.expr = createExpression(trapExpressionNode.expression());
    return trappedExpr;
}
@Override
public BLangNode transform(ReceiveActionNode receiveActionNode) {
    // Builds a worker-receive expression (`<- w`). Only a single named peer
    // worker is supported; a multiple-receive field list is reported as an
    // error and a synthetic missing identifier token is used so that the
    // transformation can still produce a node for downstream phases.
    BLangWorkerReceive workerReceiveExpr = (BLangWorkerReceive) TreeBuilder.createWorkerReceiveNode();
    Node receiveWorkers = receiveActionNode.receiveWorkers();
    Token workerName;
    if (receiveWorkers.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        workerName = ((SimpleNameReferenceNode) receiveWorkers).name();
    } else {
        // Multiple receive (`<- {a, b}`) is not yet supported: log and recover
        // with an empty missing token.
        Location receiveFieldsPos = getPosition(receiveWorkers);
        dlog.error(receiveFieldsPos, DiagnosticErrorCode.MULTIPLE_RECEIVE_ACTION_NOT_YET_SUPPORTED);
        workerName = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
                NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
    }
    workerReceiveExpr.setWorkerName(createIdentifier(workerName));
    workerReceiveExpr.pos = getPosition(receiveActionNode);
    return workerReceiveExpr;
}
/** Builds a synchronous worker send (`expr ->> worker`) expression. */
@Override
public BLangNode transform(SyncSendActionNode syncSendActionNode) {
    BLangWorkerSyncSendExpr syncSend = TreeBuilder.createWorkerSendSyncExprNode();
    syncSend.pos = getPosition(syncSendActionNode);
    syncSend.expr = createExpression(syncSendActionNode.expression());
    syncSend.setWorkerName(createIdentifier(syncSendActionNode.peerWorker().name()));
    return syncSend;
}
/**
 * Transforms an implicit anonymous function (arrow function) such as
 * {@code x => x + 1} or {@code (a, b) => a + b}.
 *
 * Refactor: the parameter-to-variable conversion was duplicated verbatim in
 * both the parenthesized-list branch and the single-parameter branch; it is
 * now a single private helper.
 */
@Override
public BLangNode transform(ImplicitAnonymousFunctionExpressionNode implicitAnonymousFunctionExpressionNode) {
    BLangArrowFunction arrowFunction = (BLangArrowFunction) TreeBuilder.createArrowFunctionNode();
    arrowFunction.pos = getPosition(implicitAnonymousFunctionExpressionNode);
    arrowFunction.functionName = createIdentifier(arrowFunction.pos,
            anonymousModelHelper.getNextAnonymousFunctionKey(packageID));
    // Params are either a parenthesized inferred list `(a, b)` or a lone name `a`.
    Node param = implicitAnonymousFunctionExpressionNode.params();
    if (param.kind() == SyntaxKind.INFER_PARAM_LIST) {
        ImplicitAnonymousFunctionParameters paramsNode = (ImplicitAnonymousFunctionParameters) param;
        SeparatedNodeList<SimpleNameReferenceNode> paramList = paramsNode.parameters();
        for (SimpleNameReferenceNode child : paramList) {
            arrowFunction.params.add(createArrowFunctionParameter(child));
        }
    } else {
        arrowFunction.params.add(createArrowFunctionParameter(param));
    }
    arrowFunction.body = new BLangExprFunctionBody();
    arrowFunction.body.expr = createExpression(implicitAnonymousFunctionExpressionNode.expression());
    arrowFunction.body.pos = arrowFunction.body.expr.pos;
    return arrowFunction;
}

// Converts a single arrow-function parameter node (a name reference, which the
// generic transform produces as a user-defined type) into a simple variable.
private BLangSimpleVariable createArrowFunctionParameter(Node param) {
    BLangUserDefinedType userDefinedType = (BLangUserDefinedType) param.apply(this);
    BLangSimpleVariable parameter = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    parameter.name = userDefinedType.typeName;
    parameter.pos = getPosition(param);
    return parameter;
}
/** Builds a `commit` action expression node. */
@Override
public BLangNode transform(CommitActionNode commitActionNode) {
    BLangCommitExpr commit = TreeBuilder.createCommitExpressionNode();
    commit.pos = getPosition(commitActionNode);
    return commit;
}
/**
 * Builds a worker-flush expression; the peer worker identifier is attached
 * only when `flush` names an explicit worker.
 */
@Override
public BLangNode transform(FlushActionNode flushActionNode) {
    BLangWorkerFlushExpr flushExpr = TreeBuilder.createWorkerFlushExpressionNode();
    flushActionNode.peerWorker().ifPresent(peer ->
            flushExpr.workerIdentifier = createIdentifier(((SimpleNameReferenceNode) peer).name()));
    flushExpr.pos = getPosition(flushActionNode);
    return flushExpr;
}
/** Builds a `let` expression from its variable declarations and body expression. */
@Override
public BLangNode transform(LetExpressionNode letExpressionNode) {
    BLangLetExpression letExpression = (BLangLetExpression) TreeBuilder.createLetExpressionNode();
    letExpression.pos = getPosition(letExpressionNode);
    letExpression.expr = createExpression(letExpressionNode.expression());
    List<BLangLetVariable> declarations = new ArrayList<>();
    for (LetVariableDeclarationNode declaration : letExpressionNode.letVarDeclarations()) {
        declarations.add(createLetVariable(declaration));
    }
    letExpression.letVarDeclarations = declarations;
    return letExpression;
}
/**
 * Creates a let-variable from its declaration: the underlying variable is
 * always final, and any annotations on the declaration are attached to it.
 */
public BLangLetVariable createLetVariable(LetVariableDeclarationNode letVarDecl) {
    BLangLetVariable letVariable = TreeBuilder.createLetVariableNode();
    VariableDefinitionNode definition = createBLangVarDef(getPosition(letVarDecl),
            letVarDecl.typedBindingPattern(), Optional.of(letVarDecl.expression()), Optional.empty());
    definition.getVariable().addFlag(Flag.FINAL);
    for (BLangNode annotation : applyAll(letVarDecl.annotations())) {
        definition.getVariable().addAnnotationAttachment((AnnotationAttachmentNode) annotation);
    }
    letVariable.definitionNode = definition;
    return letVariable;
}
/**
 * Builds a record variable reference from a mapping binding pattern used in a
 * destructuring assignment; a rest binding pattern becomes the rest parameter.
 */
@Override
public BLangNode transform(MappingBindingPatternNode mappingBindingPatternNode) {
    BLangRecordVarRef recordRef = (BLangRecordVarRef) TreeBuilder.createRecordVariableReferenceNode();
    recordRef.pos = getPosition(mappingBindingPatternNode);
    List<BLangRecordVarRefKeyValue> fieldRefs = new ArrayList<>();
    for (BindingPatternNode fieldPattern : mappingBindingPatternNode.fieldBindingPatterns()) {
        if (fieldPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            recordRef.restParam = createExpression(fieldPattern);
            continue;
        }
        fieldRefs.add(createRecordVarKeyValue(fieldPattern));
    }
    recordRef.recordRefFields = fieldRefs;
    return recordRef;
}
/**
 * Builds the key/value pair for a record destructuring field. A full field
 * binding (`name: pattern`) uses its explicit pattern as the value; a
 * shorthand field (`name`) references a variable of the same name.
 */
private BLangRecordVarRefKeyValue createRecordVarKeyValue(BindingPatternNode expr) {
    BLangRecordVarRefKeyValue fieldRef = new BLangRecordVarRefKeyValue();
    if (expr instanceof FieldBindingPatternFullNode) {
        FieldBindingPatternFullNode fullField = (FieldBindingPatternFullNode) expr;
        fieldRef.variableName = createIdentifier(fullField.variableName().name());
        fieldRef.variableReference = createExpression(fullField.bindingPattern());
        return fieldRef;
    }
    FieldBindingPatternVarnameNode shorthandField = (FieldBindingPatternVarnameNode) expr;
    fieldRef.variableName = createIdentifier(shorthandField.variableName().name());
    BLangSimpleVarRef selfRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    selfRef.pos = getPosition(shorthandField.variableName());
    selfRef.variableName = createIdentifier(shorthandField.variableName().name());
    selfRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    fieldRef.variableReference = selfRef;
    return fieldRef;
}
/**
 * Builds a tuple variable reference from a list binding pattern used in a
 * destructuring assignment; a rest binding pattern becomes the rest parameter.
 */
@Override
public BLangNode transform(ListBindingPatternNode listBindingPatternNode) {
    BLangTupleVarRef tupleRef = (BLangTupleVarRef) TreeBuilder.createTupleVariableReferenceNode();
    List<BLangExpression> memberRefs = new ArrayList<>();
    for (BindingPatternNode member : listBindingPatternNode.bindingPatterns()) {
        if (member.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            tupleRef.restParam = createExpression(member);
            continue;
        }
        memberRefs.add(createExpression(member));
    }
    tupleRef.expressions = memberRefs;
    tupleRef.pos = getPosition(listBindingPatternNode);
    return tupleRef;
}
/** A rest binding pattern (`...x`) maps to a reference to its variable name. */
@Override
public BLangNode transform(RestBindingPatternNode restBindingPatternNode) {
    SimpleNameReferenceNode restName = restBindingPatternNode.variableName();
    return createExpression(restName);
}
/** A capture binding pattern maps to a reference to the captured variable. */
@Override
public BLangNode transform(CaptureBindingPatternNode captureBindingPatternNode) {
    Token capturedName = captureBindingPatternNode.variableName();
    return createExpression(capturedName);
}
/** A wildcard binding pattern (`_`) maps to a reference to the ignore name. */
@Override
public BLangNode transform(WildcardBindingPatternNode wildcardBindingPatternNode) {
    BLangIdentifier ignoreName = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    ignoreName.value = Names.IGNORE.value;
    ignoreName.pos = getPosition(wildcardBindingPatternNode);
    BLangSimpleVarRef ignoreRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    ignoreRef.variableName = ignoreName;
    return ignoreRef;
}
@Override
public BLangNode transform(ErrorBindingPatternNode errorBindingPatternNode) {
    // Builds an error variable reference for `error(msg, cause, k = v, ...rest)`
    // style destructuring. Positional meaning depends on index: position 0 is
    // the message; a capture/wildcard at a later position, or an explicit error
    // pattern, is the cause; named args are detail fields; anything else is rest.
    BLangErrorVarRef errorVarRef = (BLangErrorVarRef) TreeBuilder.createErrorVariableReferenceNode();
    errorVarRef.pos = getPosition(errorBindingPatternNode);
    Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
    if (errorTypeRef.isPresent()) {
        errorVarRef.typeNode = createTypeNode(errorTypeRef.get());
    }
    SeparatedNodeList<BindingPatternNode> argListBindingPatterns = errorBindingPatternNode.argListBindingPatterns();
    int numberOfArgs = argListBindingPatterns.size();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    for (int position = 0; position < numberOfArgs; position++) {
        BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
        switch (bindingPatternNode.kind()) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                if (position == 0) {
                    errorVarRef.message = (BLangVariableReference) createExpression(bindingPatternNode);
                    break;
                }
                // Intentional fall-through: a capture/wildcard that is NOT at
                // position 0 is treated as the cause, same as an error pattern.
            case ERROR_BINDING_PATTERN:
                errorVarRef.cause = (BLangVariableReference) createExpression(bindingPatternNode);
                break;
            case NAMED_ARG_BINDING_PATTERN:
                namedArgs.add((BLangNamedArgsExpression) bindingPatternNode.apply(this));
                break;
            default:
                // REST_BINDING_PATTERN (and any other remaining kind).
                errorVarRef.restVar = (BLangVariableReference) createExpression(bindingPatternNode);
        }
    }
    errorVarRef.detail = namedArgs;
    return errorVarRef;
}
/** Builds a named-argument expression (`name = pattern`) for error destructuring. */
@Override
public BLangNode transform(NamedArgBindingPatternNode namedArgBindingPatternNode) {
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.pos = getPosition(namedArgBindingPatternNode);
    namedArg.name = createIdentifier(namedArgBindingPatternNode.argName());
    namedArg.expr = createExpression(namedArgBindingPatternNode.bindingPattern());
    return namedArg;
}
/**
 * Builds a return statement; a bare `return;` gets a synthetic nil literal so
 * the node always carries an expression.
 */
@Override
public BLangNode transform(ReturnStatementNode returnStmtNode) {
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = getPosition(returnStmtNode);
    Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> returnedExpr = returnStmtNode.expression();
    if (returnedExpr.isPresent()) {
        returnStmt.expr = createExpression(returnedExpr.get());
        return returnStmt;
    }
    // Synthesize `()` for a value-less return.
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.pos = getPosition(returnStmtNode);
    nilLiteral.value = Names.NIL_VALUE;
    nilLiteral.type = symTable.nilType;
    returnStmt.expr = nilLiteral;
    return returnStmt;
}
/** Builds a `panic expr;` statement node. */
@Override
public BLangNode transform(PanicStatementNode panicStmtNode) {
    BLangPanic panicStmt = (BLangPanic) TreeBuilder.createPanicNode();
    panicStmt.expr = createExpression(panicStmtNode.expression());
    panicStmt.pos = getPosition(panicStmtNode);
    return panicStmt;
}
/** Builds a `continue;` statement node. */
@Override
public BLangNode transform(ContinueStatementNode continueStmtNode) {
    BLangContinue continueStmt = (BLangContinue) TreeBuilder.createContinueNode();
    continueStmt.pos = getPosition(continueStmtNode);
    return continueStmt;
}
/**
 * Builds the simple variable for a listener declaration, carrying its optional
 * visibility qualifier, type, initializer and annotations.
 */
@Override
public BLangNode transform(ListenerDeclarationNode listenerDeclarationNode) {
    Token visibilityQualifier = listenerDeclarationNode.visibilityQualifier().orElse(null);
    BLangSimpleVariable listenerVar = new SimpleVarBuilder()
            .with(listenerDeclarationNode.variableName())
            .setTypeByNode(listenerDeclarationNode.typeDescriptor().orElse(null))
            .setExpressionByNode(listenerDeclarationNode.initializer())
            .setVisibility(visibilityQualifier)
            .isListenerVar()
            .build();
    // Position excludes metadata (docs/annotations) for the variable itself.
    listenerVar.pos = getPositionWithoutMetadata(listenerDeclarationNode);
    listenerVar.name.pos = getPosition(listenerDeclarationNode.variableName());
    listenerVar.annAttachments = applyAll(getAnnotations(listenerDeclarationNode.metadata()));
    return listenerVar;
}
/** Builds a `break;` statement node. */
@Override
public BLangNode transform(BreakStatementNode breakStmtNode) {
    BLangBreak breakStmt = (BLangBreak) TreeBuilder.createBreakNode();
    breakStmt.pos = getPosition(breakStmtNode);
    return breakStmt;
}
/**
 * Transforms an assignment statement. Binding-pattern LHS forms are routed to
 * the dedicated destructure builders; otherwise a plain assignment is built
 * and the LHS is validated not to be an invocation result.
 */
@Override
public BLangNode transform(AssignmentStatementNode assignmentStmtNode) {
    SyntaxKind lhsKind = assignmentStmtNode.varRef().kind();
    if (lhsKind == SyntaxKind.LIST_BINDING_PATTERN) {
        return createTupleDestructureStatement(assignmentStmtNode);
    }
    if (lhsKind == SyntaxKind.MAPPING_BINDING_PATTERN) {
        return createRecordDestructureStatement(assignmentStmtNode);
    }
    if (lhsKind == SyntaxKind.ERROR_BINDING_PATTERN) {
        return createErrorDestructureStatement(assignmentStmtNode);
    }
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    BLangExpression target = createExpression(assignmentStmtNode.varRef());
    validateLvexpr(target, DiagnosticErrorCode.INVALID_INVOCATION_LVALUE_ASSIGNMENT);
    assignment.setExpression(createExpression(assignmentStmtNode.expression()));
    assignment.pos = getPosition(assignmentStmtNode);
    assignment.varRef = target;
    return assignment;
}
/** Builds a tuple destructure statement (`[a, b] = expr;`). */
public BLangNode createTupleDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangTupleDestructure destructure =
            (BLangTupleDestructure) TreeBuilder.createTupleDestructureStatementNode();
    destructure.varRef = (BLangTupleVarRef) createExpression(assignmentStmtNode.varRef());
    destructure.setExpression(createExpression(assignmentStmtNode.expression()));
    destructure.pos = getPosition(assignmentStmtNode);
    return destructure;
}
/** Builds a record destructure statement (`{a, b} = expr;`). */
public BLangNode createRecordDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangRecordDestructure destructure =
            (BLangRecordDestructure) TreeBuilder.createRecordDestructureStatementNode();
    destructure.varRef = (BLangRecordVarRef) createExpression(assignmentStmtNode.varRef());
    destructure.setExpression(createExpression(assignmentStmtNode.expression()));
    destructure.pos = getPosition(assignmentStmtNode);
    return destructure;
}
/** Builds an error destructure statement (`error(msg, ...) = expr;`). */
public BLangNode createErrorDestructureStatement(AssignmentStatementNode assignmentStmtNode) {
    BLangErrorDestructure destructure =
            (BLangErrorDestructure) TreeBuilder.createErrorDestructureStatementNode();
    destructure.varRef = (BLangErrorVarRef) createExpression(assignmentStmtNode.varRef());
    destructure.setExpression(createExpression(assignmentStmtNode.expression()));
    destructure.pos = getPosition(assignmentStmtNode);
    return destructure;
}
/** Builds a compound assignment (`x += expr;` etc.) from its operator and operands. */
@Override
public BLangNode transform(CompoundAssignmentStatementNode compoundAssignmentStmtNode) {
    BLangCompoundAssignment compoundAssignment =
            (BLangCompoundAssignment) TreeBuilder.createCompoundAssignmentNode();
    compoundAssignment.setExpression(createExpression(compoundAssignmentStmtNode.rhsExpression()));
    VariableReferenceNode lhsRef =
            (VariableReferenceNode) createExpression(compoundAssignmentStmtNode.lhsExpression());
    compoundAssignment.setVariable(lhsRef);
    compoundAssignment.pos = getPosition(compoundAssignmentStmtNode);
    // The operator kind is derived from the token text, e.g. "+=" -> PLUS.
    compoundAssignment.opKind = OperatorKind.valueFrom(compoundAssignmentStmtNode.binaryOperator().text());
    return compoundAssignment;
}
/**
 * Reports an error when an invocation appears as (or inside) an assignment
 * target; recurses through field/index access chains so `f().x = ...` and
 * `f()[i] = ...` are flagged too.
 */
private void validateLvexpr(ExpressionNode lExprNode, DiagnosticCode errorCode) {
    NodeKind exprKind = lExprNode.getKind();
    if (exprKind == NodeKind.INVOCATION) {
        dlog.error(((BLangInvocation) lExprNode).pos, errorCode);
    }
    boolean isAccessChain = exprKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || exprKind == NodeKind.INDEX_BASED_ACCESS_EXPR;
    if (isAccessChain) {
        validateLvexpr(((BLangAccessExpression) lExprNode).expr, errorCode);
    }
}
/** Builds a `do { ... } [on fail ...]` statement node. */
@Override
public BLangNode transform(DoStatementNode doStatementNode) {
    BLangDo doStmt = (BLangDo) TreeBuilder.createDoNode();
    doStmt.pos = getPosition(doStatementNode);
    BLangBlockStmt body = (BLangBlockStmt) doStatementNode.blockStatement().apply(this);
    body.pos = getPosition(doStatementNode.blockStatement());
    doStmt.setBody(body);
    doStatementNode.onFailClause().ifPresent(onFail ->
            doStmt.setOnFailClause((org.ballerinalang.model.clauses.OnFailClauseNode) onFail.apply(this)));
    return doStmt;
}
/** Builds a `fail expr;` statement node. */
@Override
public BLangNode transform(FailStatementNode failStatementNode) {
    BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
    failStmt.expr = createExpression(failStatementNode.expression());
    failStmt.pos = getPosition(failStatementNode);
    return failStmt;
}
/** Builds a `while cond { ... } [on fail ...]` statement node. */
@Override
public BLangNode transform(WhileStatementNode whileStmtNode) {
    BLangWhile whileStmt = (BLangWhile) TreeBuilder.createWhileNode();
    whileStmt.setCondition(createExpression(whileStmtNode.condition()));
    whileStmt.pos = getPosition(whileStmtNode);
    BLangBlockStmt body = (BLangBlockStmt) whileStmtNode.whileBody().apply(this);
    body.pos = getPosition(whileStmtNode.whileBody());
    whileStmt.setBody(body);
    whileStmtNode.onFailClause().ifPresent(onFail ->
            whileStmt.setOnFailClause((org.ballerinalang.model.clauses.OnFailClauseNode) onFail.apply(this)));
    return whileStmt;
}
/** Builds an if/else statement; the else branch may itself be an if (else-if chain). */
@Override
public BLangNode transform(IfElseStatementNode ifElseStmtNode) {
    BLangIf ifStmt = (BLangIf) TreeBuilder.createIfElseStatementNode();
    ifStmt.pos = getPosition(ifElseStmtNode);
    ifStmt.setCondition(createExpression(ifElseStmtNode.condition()));
    ifStmt.setBody((BLangBlockStmt) ifElseStmtNode.ifBody().apply(this));
    ifElseStmtNode.elseBody().ifPresent(elseBody -> {
        ElseBlockNode elseBlock = (ElseBlockNode) elseBody;
        org.ballerinalang.model.tree.statements.StatementNode elseStmt =
                (org.ballerinalang.model.tree.statements.StatementNode) elseBlock.elseBody().apply(this);
        ifStmt.setElseStatement(elseStmt);
    });
    return ifStmt;
}
@Override
public BLangNode transform(BlockStatementNode blockStatement) {
    // Builds a block statement. The isInLocalContext flag is raised only while
    // the child statements are generated so nested transforms know they are in
    // a local (statement-level) scope; it must be restored before returning.
    BLangBlockStmt bLBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    this.isInLocalContext = true;
    bLBlockStmt.stmts = generateBLangStatements(blockStatement.statements());
    this.isInLocalContext = false;
    bLBlockStmt.pos = getPosition(blockStatement);
    // For if/else bodies, stretch the block position left to include the
    // keyword/condition of the parent so diagnostics cover the whole construct.
    SyntaxKind parent = blockStatement.parent().kind();
    if (parent == SyntaxKind.IF_ELSE_STATEMENT || parent == SyntaxKind.ELSE_BLOCK) {
        bLBlockStmt.pos = expandLeft(bLBlockStmt.pos, getPosition(blockStatement.parent()));
    }
    return bLBlockStmt;
}
/** Builds a `rollback [expr];` statement node. */
@Override
public BLangNode transform(RollbackStatementNode rollbackStatementNode) {
    BLangRollback rollbackStmt = (BLangRollback) TreeBuilder.createRollbackNode();
    rollbackStmt.pos = getPosition(rollbackStatementNode);
    rollbackStatementNode.expression().ifPresent(expr -> rollbackStmt.expr = createExpression(expr));
    return rollbackStmt;
}
/** Builds a `lock { ... } [on fail ...]` statement node. */
@Override
public BLangNode transform(LockStatementNode lockStatementNode) {
    BLangLock lockStmt = (BLangLock) TreeBuilder.createLockNode();
    lockStmt.pos = getPosition(lockStatementNode);
    BLangBlockStmt body = (BLangBlockStmt) lockStatementNode.blockStatement().apply(this);
    body.pos = getPosition(lockStatementNode.blockStatement());
    lockStmt.setBody(body);
    lockStatementNode.onFailClause().ifPresent(onFail ->
            lockStmt.setOnFailClause((org.ballerinalang.model.clauses.OnFailClauseNode) onFail.apply(this)));
    return lockStmt;
}
/**
 * Builds the type node for `typedesc` / `typedesc<T>`; a type parameter
 * wraps the built-in reference type in a constrained type.
 */
@Override
public BLangNode transform(TypedescTypeDescriptorNode typedescTypeDescriptorNode) {
    BLangBuiltInRefTypeNode typedescType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    typedescType.typeKind = TypeKind.TYPEDESC;
    typedescType.pos = getPosition(typedescTypeDescriptorNode);
    Optional<TypeParameterNode> typeParam = typedescTypeDescriptorNode.typedescTypeParamsNode();
    if (!typeParam.isPresent()) {
        return typedescType;
    }
    BLangConstrainedType constrained = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
    constrained.type = typedescType;
    constrained.constraint = createTypeNode(typeParam.get().typeNode());
    constrained.pos = typedescType.pos;
    return constrained;
}
/** Delegates a local variable declaration to the shared var-def builder. */
@Override
public BLangNode transform(VariableDeclarationNode varDeclaration) {
    VariableDefinitionNode varDef = createBLangVarDef(getPosition(varDeclaration),
            varDeclaration.typedBindingPattern(), varDeclaration.initializer(), varDeclaration.finalKeyword());
    return (BLangNode) varDef;
}
/**
 * Builds the type node for `xml` / `xml<T>`; a type parameter wraps the
 * built-in reference type in a constrained type.
 *
 * Fix: this override was missing the {@code @Override} annotation that every
 * sibling transform carries, so a signature drift in the visitor base class
 * would silently turn it into an unused overload.
 */
@Override
public BLangNode transform(XmlTypeDescriptorNode xmlTypeDescriptorNode) {
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TypeKind.XML;
    refType.pos = getPosition(xmlTypeDescriptorNode);
    Optional<TypeParameterNode> node = xmlTypeDescriptorNode.xmlTypeParamsNode();
    if (node.isPresent()) {
        BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
        constrainedType.type = refType;
        constrainedType.constraint = createTypeNode(node.get().typeNode());
        constrainedType.pos = getPosition(xmlTypeDescriptorNode);
        return constrainedType;
    }
    return refType;
}
// Creates the variable-definition node for a typed binding pattern, dispatching
// on the binding-pattern kind: simple/wildcard patterns become a simple var def
// (flags and type handled inline), while mapping/list/error patterns are
// initialized via initializeBLangVariable and wrapped in their own def kinds.
private VariableDefinitionNode createBLangVarDef(Location location,
        TypedBindingPatternNode typedBindingPattern,
        Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
        Optional<Token> finalKeyword) {
    BindingPatternNode bindingPattern = typedBindingPattern.bindingPattern();
    BLangVariable variable = getBLangVariableNode(bindingPattern);
    // The only qualifier reaching this path is an optional `final`.
    List<Token> qualifiers = new ArrayList<>();
    if (finalKeyword.isPresent()) {
        qualifiers.add(finalKeyword.get());
    }
    NodeList<Token> qualifierList = NodeFactory.createNodeList(qualifiers);
    switch (bindingPattern.kind()) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            BLangSimpleVariableDef bLVarDef =
                    (BLangSimpleVariableDef) TreeBuilder.createSimpleVariableDefinitionNode();
            bLVarDef.pos = variable.pos = location;
            BLangExpression expr = initializer.isPresent() ? createExpression(initializer.get()) : null;
            variable.setInitialExpression(expr);
            bLVarDef.setVariable(variable);
            if (finalKeyword.isPresent()) {
                variable.flagSet.add(Flag.FINAL);
            }
            // `var x = ...` leaves the type node unset; the declared type is
            // only attached for explicitly typed declarations.
            TypeDescriptorNode typeDesc = typedBindingPattern.typeDescriptor();
            variable.isDeclaredWithVar = isDeclaredWithVar(typeDesc);
            if (!variable.isDeclaredWithVar) {
                variable.setTypeNode(createTypeNode(typeDesc));
            }
            return bLVarDef;
        case MAPPING_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createRecordVariableDef(variable);
        case LIST_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createTupleVariableDef(variable);
        case ERROR_BINDING_PATTERN:
            initializeBLangVariable(variable, typedBindingPattern.typeDescriptor(), initializer,
                    qualifierList);
            return createErrorVariableDef(variable);
        default:
            throw new RuntimeException(
                    "Syntax kind is not a valid binding pattern " + typedBindingPattern.bindingPattern().kind());
    }
}
// Applies qualifier-derived flags (final/configurable/isolated), the declared
// type (unless declared with `var`), and the initializer to the given variable.
//
// Fix: the CONFIGURABLE branch previously called initializer.get() without an
// isPresent() check — a configurable declaration that reaches here without an
// initializer would throw NoSuchElementException instead of being handled.
private void initializeBLangVariable(BLangVariable var, TypeDescriptorNode type,
        Optional<io.ballerina.compiler.syntax.tree.ExpressionNode> initializer,
        NodeList<Token> qualifiers) {

    for (Token qualifier : qualifiers) {
        SyntaxKind kind = qualifier.kind();
        if (kind == SyntaxKind.FINAL_KEYWORD) {
            markVariableWithFlag(var, Flag.FINAL);
        } else if (kind == SyntaxKind.CONFIGURABLE_KEYWORD) {
            var.flagSet.add(Flag.CONFIGURABLE);
            var.flagSet.add(Flag.FINAL);
            // A configurable declared as `= ?;` is required and its
            // placeholder initializer is dropped.
            if (initializer.isPresent() && initializer.get().kind() == SyntaxKind.REQUIRED_EXPRESSION) {
                var.flagSet.add(Flag.REQUIRED);
                initializer = Optional.empty();
            }
        } else if (kind == SyntaxKind.ISOLATED_KEYWORD) {
            var.flagSet.add(Flag.ISOLATED);
        }
    }

    var.isDeclaredWithVar = isDeclaredWithVar(type);
    if (!var.isDeclaredWithVar) {
        var.setTypeNode(createTypeNode(type));
    }
    if (initializer.isPresent()) {
        var.setInitialExpression(createExpression(initializer.get()));
    }
}
/** Wraps a record variable in its definition node, inheriting its position. */
private BLangRecordVariableDef createRecordVariableDef(BLangVariable var) {
    BLangRecordVariableDef definition =
            (BLangRecordVariableDef) TreeBuilder.createRecordVariableDefinitionNode();
    definition.setVariable(var);
    definition.pos = var.pos;
    return definition;
}
/** Wraps a tuple variable in its definition node, inheriting its position. */
private BLangTupleVariableDef createTupleVariableDef(BLangVariable tupleVar) {
    BLangTupleVariableDef definition =
            (BLangTupleVariableDef) TreeBuilder.createTupleVariableDefinitionNode();
    definition.setVariable(tupleVar);
    definition.pos = tupleVar.pos;
    return definition;
}
/** Wraps an error variable in its definition node, inheriting its position. */
private BLangErrorVariableDef createErrorVariableDef(BLangVariable errorVar) {
    BLangErrorVariableDef definition =
            (BLangErrorVariableDef) TreeBuilder.createErrorVariableDefinitionNode();
    definition.setVariable(errorVar);
    definition.pos = errorVar.pos;
    return definition;
}
/**
 * Builds an expression statement. An async send used in statement position is
 * transformed directly (it has a dedicated node); everything else is wrapped.
 */
@Override
public BLangNode transform(ExpressionStatementNode expressionStatement) {
    if (expressionStatement.expression().kind() == SyntaxKind.ASYNC_SEND_ACTION) {
        return expressionStatement.expression().apply(this);
    }
    BLangExpressionStmt exprStmt = (BLangExpressionStmt) TreeBuilder.createExpressionStatementNode();
    exprStmt.expr = createExpression(expressionStatement.expression());
    exprStmt.pos = getPosition(expressionStatement);
    return exprStmt;
}
/** Builds an asynchronous worker send (`expr -> worker`) node. */
@Override
public BLangNode transform(AsyncSendActionNode asyncSendActionNode) {
    BLangWorkerSend asyncSend = (BLangWorkerSend) TreeBuilder.createWorkerSendNode();
    asyncSend.setWorkerName(createIdentifier(getPosition(asyncSendActionNode.peerWorker()),
            asyncSendActionNode.peerWorker().name()));
    asyncSend.expr = createExpression(asyncSendActionNode.expression());
    asyncSend.pos = getPosition(asyncSendActionNode);
    return asyncSend;
}
/**
 * Builds a wait action: `wait {a: f1, b: f2}` becomes a wait-for-all
 * expression, while `wait f` waits on a single future.
 */
@Override
public BLangNode transform(WaitActionNode waitActionNode) {
    Node futureExpr = waitActionNode.waitFutureExpr();
    if (futureExpr.kind() == SyntaxKind.WAIT_FIELDS_LIST) {
        return getWaitForAllExpr((WaitFieldsListNode) futureExpr);
    }
    BLangWaitExpr waitExpr = TreeBuilder.createWaitExpressionNode();
    waitExpr.exprList = Collections.singletonList(createExpression(futureExpr));
    waitExpr.pos = getPosition(waitActionNode);
    return waitExpr;
}
/** Builds a wait-for-all expression from the field list of `wait {a, b: f}`. */
private BLangWaitForAllExpr getWaitForAllExpr(WaitFieldsListNode waitFields) {
    BLangWaitForAllExpr waitForAll = TreeBuilder.createWaitForAllExpressionNode();
    List<BLangWaitKeyValue> keyValues = new ArrayList<>();
    for (Node field : waitFields.waitFields()) {
        keyValues.add(getWaitForAllExpr(field));
    }
    waitForAll.keyValuePairs = keyValues;
    waitForAll.pos = getPosition(waitFields);
    return waitForAll;
}
private BLangWaitKeyValue getWaitForAllExpr(Node waitFields) {
    // Builds one key/value entry of a wait-for-all expression. An explicit
    // field `key: future` sets key + valueExpr; a bare name `x` is shorthand
    // for `x: x`, so the key doubles as a variable reference in keyExpr.
    BLangWaitForAllExpr.BLangWaitKeyValue keyValue = TreeBuilder.createWaitKeyValueNode();
    keyValue.pos = getPosition(waitFields);

    if (waitFields.kind() == SyntaxKind.WAIT_FIELD) {
        WaitFieldNode waitFieldNode = (WaitFieldNode) waitFields;
        BLangIdentifier key = createIdentifier(waitFieldNode.fieldName().name());
        key.setLiteral(false);
        keyValue.key = key;
        keyValue.valueExpr = createExpression(waitFieldNode.waitFutureExpr());
        return keyValue;
    }

    // Shorthand field: the name is both the result key and the waited future.
    SimpleNameReferenceNode varName = (SimpleNameReferenceNode) waitFields;
    BLangIdentifier key = createIdentifier(varName.name());
    key.setLiteral(false);
    keyValue.key = key;
    BLangSimpleVarRef varRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    varRef.pos = getPosition(varName);
    varRef.variableName = key;
    varRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    keyValue.keyExpr = varRef;
    return keyValue;
}
@Override
public BLangNode transform(StartActionNode startActionNode) {
    // Transforms `start f(...)` into an async invocation. The started
    // expression may come back wrapped in a worker-send node; in that case the
    // inner invocation is unwrapped first. A plain invocation is upgraded to
    // an action invocation so later phases treat it as a remote/async call.
    BLangNode expression = createActionOrExpression(startActionNode.expression());

    BLangInvocation invocation;
    if (!(expression instanceof BLangWorkerSend)) {
        invocation = (BLangInvocation) expression;
    } else {
        invocation = (BLangInvocation) ((BLangWorkerSend) expression).expr;
        expression = ((BLangWorkerSend) expression).expr;
    }

    if (expression.getKind() == NodeKind.INVOCATION) {
        // Copy the invocation fields into a fresh action-invocation node;
        // position comes from the whole `start ...` action.
        BLangActionInvocation actionInvocation = (BLangActionInvocation) TreeBuilder.createActionInvocation();
        actionInvocation.expr = invocation.expr;
        actionInvocation.pkgAlias = invocation.pkgAlias;
        actionInvocation.name = invocation.name;
        actionInvocation.argExprs = invocation.argExprs;
        actionInvocation.flagSet = invocation.flagSet;
        actionInvocation.pos = getPosition(startActionNode);
        invocation = actionInvocation;
    }

    invocation.async = true;
    invocation.annAttachments = applyAll(startActionNode.annotations());
    return invocation;
}
/** Builds a `transaction { ... } [on fail ...]` statement node. */
@Override
public BLangNode transform(TransactionStatementNode transactionStatementNode) {
    BLangTransaction transactionStmt = (BLangTransaction) TreeBuilder.createTransactionNode();
    BLangBlockStmt body = (BLangBlockStmt) transactionStatementNode.blockStatement().apply(this);
    body.pos = getPosition(transactionStatementNode.blockStatement());
    transactionStmt.setTransactionBody(body);
    transactionStmt.pos = getPosition(transactionStatementNode);
    transactionStatementNode.onFailClause().ifPresent(onFail ->
            transactionStmt.setOnFailClause(
                    (org.ballerinalang.model.clauses.OnFailClauseNode) onFail.apply(this)));
    return transactionStmt;
}
/** A positional argument maps directly to its inner expression. */
@Override
public BLangNode transform(PositionalArgumentNode argumentNode) {
    io.ballerina.compiler.syntax.tree.ExpressionNode argExpr = argumentNode.expression();
    return createExpression(argExpr);
}
/** Builds a named argument (`name = expr`) expression node. */
@Override
public BLangNode transform(NamedArgumentNode namedArgumentNode) {
    BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) TreeBuilder.createNamedArgNode();
    namedArg.name = this.createIdentifier(namedArgumentNode.argumentName().name());
    namedArg.expr = createExpression(namedArgumentNode.expression());
    namedArg.pos = getPosition(namedArgumentNode);
    return namedArg;
}
/** Builds a rest argument (`...expr`); position is anchored on the ellipsis. */
@Override
public BLangNode transform(RestArgumentNode restArgumentNode) {
    BLangRestArgsExpression restArgs = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    restArgs.expr = createExpression(restArgumentNode.expression());
    restArgs.pos = getPosition(restArgumentNode.ellipsis());
    return restArgs;
}
/** Builds the simple variable for a required function parameter. */
@Override
public BLangNode transform(RequiredParameterNode requiredParameter) {
    BLangSimpleVariable paramVar = createSimpleVar(requiredParameter.paramName(),
            requiredParameter.typeName(), requiredParameter.annotations());
    paramVar.pos = getPosition(requiredParameter);
    requiredParameter.paramName().ifPresent(name -> paramVar.name.pos = getPosition(name));
    paramVar.flagSet.add(Flag.REQUIRED_PARAM);
    return paramVar;
}
/**
 * Builds the simple variable for an included-record parameter (`*T name`);
 * its position is trimmed so it starts at the type name.
 */
@Override
public BLangNode transform(IncludedRecordParameterNode includedRecordParameterNode) {
    BLangSimpleVariable paramVar = createSimpleVar(includedRecordParameterNode.paramName(),
            includedRecordParameterNode.typeName(), includedRecordParameterNode.annotations());
    paramVar.flagSet.add(INCLUDED);
    paramVar.pos = getPosition(includedRecordParameterNode);
    includedRecordParameterNode.paramName().ifPresent(name -> paramVar.name.pos = getPosition(name));
    paramVar.pos = trimLeft(paramVar.pos, getPosition(includedRecordParameterNode.typeName()));
    return paramVar;
}
/**
 * Transforms a defaultable parameter ({@code T name = expr}) into a
 * {@link BLangSimpleVariable} flagged {@code DEFAULTABLE_PARAM} with the default
 * value attached as its initial expression.
 */
@Override
public BLangNode transform(DefaultableParameterNode defaultableParameter) {
    BLangSimpleVariable param = createSimpleVar(defaultableParameter.paramName(),
            defaultableParameter.typeName(), defaultableParameter.annotations());
    param.setInitialExpression(createExpression(defaultableParameter.expression()));
    param.pos = getPosition(defaultableParameter);
    param.flagSet.add(Flag.DEFAULTABLE_PARAM);
    return param;
}
/**
 * Transforms a rest parameter ({@code T... name}) into a {@link BLangSimpleVariable}
 * flagged {@code REST_PARAM} whose type is a one-dimensional array of the declared
 * element type.
 */
@Override
public BLangNode transform(RestParameterNode restParameter) {
    BLangSimpleVariable bLSimpleVar = createSimpleVar(restParameter.paramName(), restParameter.typeName(),
            restParameter.annotations());
    // Wrap the declared element type in an array type: `T...` binds as `T[]`.
    BLangArrayType bLArrayType = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    bLArrayType.elemtype = bLSimpleVar.typeNode;
    bLArrayType.dimensions = 1;
    bLSimpleVar.typeNode = bLArrayType;
    bLArrayType.pos = getPosition(restParameter.typeName());
    bLSimpleVar.flagSet.add(Flag.REST_PARAM);
    bLSimpleVar.pos = getPosition(restParameter);
    return bLSimpleVar;
}
/**
 * Transforms an optional type descriptor ({@code T?}) into a nullable
 * {@link BLangUnionTypeNode} whose members are {@code T} and nil.
 */
@Override
public BLangNode transform(OptionalTypeDescriptorNode optTypeDescriptor) {
    // The nil member is anchored at the `?` token.
    BLangValueType nilType = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilType.typeKind = TypeKind.NIL;
    nilType.pos = getPosition(optTypeDescriptor.questionMarkToken());

    BLangUnionTypeNode unionType = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    unionType.memberTypeNodes.add(createTypeNode(optTypeDescriptor.typeDescriptor()));
    unionType.memberTypeNodes.add(nilType);
    unionType.nullable = true;
    unionType.pos = getPosition(optTypeDescriptor);
    return unionType;
}
/**
 * Transforms a function type descriptor into a {@link BLangFunctionTypeNode}.
 * When no signature is present (the `function` any-function type) the node is
 * flagged {@code ANY_FUNCTION}; a missing return type defaults to nil.
 * Qualifiers `isolated` and `transactional` are carried over as flags.
 */
@Override
public BLangNode transform(FunctionTypeDescriptorNode functionTypeDescriptorNode) {
    BLangFunctionTypeNode functionTypeNode = (BLangFunctionTypeNode) TreeBuilder.createFunctionTypeNode();
    functionTypeNode.pos = getPosition(functionTypeDescriptorNode);
    functionTypeNode.returnsKeywordExists = true;
    if (functionTypeDescriptorNode.functionSignature().isPresent()) {
        FunctionSignatureNode funcSignature = functionTypeDescriptorNode.functionSignature().get();
        // Partition parameters: the rest parameter is stored separately from ordinary params.
        for (ParameterNode child : funcSignature.parameters()) {
            SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
            if (child.kind() == SyntaxKind.REST_PARAM) {
                functionTypeNode.restParam = (BLangSimpleVariable) param;
            } else {
                functionTypeNode.params.add((BLangVariable) param);
            }
        }
        Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
        if (retNode.isPresent()) {
            ReturnTypeDescriptorNode returnType = retNode.get();
            functionTypeNode.returnTypeNode = createTypeNode(returnType.type());
        } else {
            // No explicit return type: default to nil.
            BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
            bLValueType.pos = getPosition(funcSignature);
            bLValueType.typeKind = TypeKind.NIL;
            functionTypeNode.returnTypeNode = bLValueType;
        }
    } else {
        // Bare `function` type with no signature matches any function value.
        functionTypeNode.flagSet.add(Flag.ANY_FUNCTION);
    }
    functionTypeNode.flagSet.add(Flag.PUBLIC);
    for (Token token : functionTypeDescriptorNode.qualifierList()) {
        if (token.kind() == SyntaxKind.ISOLATED_KEYWORD) {
            functionTypeNode.flagSet.add(Flag.ISOLATED);
        } else if (token.kind() == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            functionTypeNode.flagSet.add(Flag.TRANSACTIONAL);
        }
    }
    return functionTypeNode;
}
/**
 * Transforms a parameterized type descriptor (e.g. {@code map<T>}, {@code typedesc<T>})
 * into a {@link BLangConstrainedType}: a built-in reference type constrained by the
 * transformed type parameter.
 */
@Override
public BLangNode transform(ParameterizedTypeDescriptorNode parameterizedTypeDescNode) {
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    // Copy kind/pos from the built-in type node so the reference reflects the keyword token.
    BLangBuiltInRefTypeNode typeNode =
            (BLangBuiltInRefTypeNode) createBuiltInTypeNode(parameterizedTypeDescNode.parameterizedType());
    refType.typeKind = typeNode.typeKind;
    refType.pos = typeNode.pos;
    BLangConstrainedType constrainedType = (BLangConstrainedType) TreeBuilder.createConstrainedTypeNode();
    constrainedType.type = refType;
    constrainedType.constraint = createTypeNode(parameterizedTypeDescNode.typeParameter().typeNode());
    constrainedType.pos = getPosition(parameterizedTypeDescNode);
    return constrainedType;
}
/**
 * Transforms a table key specifier ({@code key(f1, f2, ...)}) into a
 * {@link BLangTableKeySpecifier} holding one identifier per key field.
 */
@Override
public BLangNode transform(KeySpecifierNode keySpecifierNode) {
    BLangTableKeySpecifier keySpecifier =
            (BLangTableKeySpecifier) TreeBuilder.createTableKeySpecifierNode();
    keySpecifier.pos = getPosition(keySpecifierNode);
    keySpecifierNode.fieldNames().forEach(field -> keySpecifier.addFieldNameIdentifier(createIdentifier(field)));
    return keySpecifier;
}
/**
 * Transforms a table key type constraint ({@code key<T>}) into a
 * {@link BLangTableKeyTypeConstraint}.
 */
@Override
public BLangNode transform(KeyTypeConstraintNode keyTypeConstraintNode) {
    BLangTableKeyTypeConstraint constraint = new BLangTableKeyTypeConstraint();
    constraint.keyType = createTypeNode(keyTypeConstraintNode.typeParameterNode());
    constraint.pos = getPosition(keyTypeConstraintNode);
    return constraint;
}
/**
 * Transforms a table type descriptor ({@code table<RowType> key(...)/key<...>}) into a
 * {@link BLangTableTypeNode} with the row type as its constraint and either a key
 * specifier or a key type constraint, whichever form the source used.
 */
@Override
public BLangNode transform(TableTypeDescriptorNode tableTypeDescriptorNode) {
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TreeUtils.stringToTypeKind(tableTypeDescriptorNode.tableKeywordToken().text());
    refType.pos = getPosition(tableTypeDescriptorNode);
    BLangTableTypeNode tableTypeNode = (BLangTableTypeNode) TreeBuilder.createTableTypeNode();
    tableTypeNode.pos = getPosition(tableTypeDescriptorNode);
    tableTypeNode.type = refType;
    tableTypeNode.constraint = createTypeNode(tableTypeDescriptorNode.rowTypeParameterNode());
    if (tableTypeDescriptorNode.keyConstraintNode().isPresent()) {
        Node constraintNode = tableTypeDescriptorNode.keyConstraintNode().get();
        // A table may carry either `key<T>` (type constraint) or `key(f1, ...)` (field specifier).
        if (constraintNode.kind() == SyntaxKind.KEY_TYPE_CONSTRAINT) {
            tableTypeNode.tableKeyTypeConstraint = (BLangTableKeyTypeConstraint) constraintNode.apply(this);
        } else if (constraintNode.kind() == SyntaxKind.KEY_SPECIFIER) {
            tableTypeNode.tableKeySpecifier = (BLangTableKeySpecifier) constraintNode.apply(this);
        }
    }
    tableTypeNode.isTypeInlineDefined = checkIfAnonymous(tableTypeDescriptorNode);
    return tableTypeNode;
}
/**
 * Transforms a simple (unqualified) name reference into a {@link BLangUserDefinedType}
 * with an empty package alias.
 */
@Override
public BLangNode transform(SimpleNameReferenceNode simpleNameRefNode) {
    BLangUserDefinedType userDefinedType = new BLangUserDefinedType();
    userDefinedType.pos = getPosition(simpleNameRefNode);
    userDefinedType.typeName = createIdentifier(simpleNameRefNode.name());
    // No module prefix on a simple name: use an empty identifier as the package alias.
    userDefinedType.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    return userDefinedType;
}
/**
 * Transforms a qualified name reference ({@code prefix:name}) into a
 * {@link BLangSimpleVarRef} carrying the module prefix as the package alias.
 */
@Override
public BLangNode transform(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
    BLangSimpleVarRef nameRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    nameRef.pos = getPosition(qualifiedNameReferenceNode);
    nameRef.variableName = createIdentifier(qualifiedNameReferenceNode.identifier());
    nameRef.pkgAlias = createIdentifier(qualifiedNameReferenceNode.modulePrefix());
    return nameRef;
}
/**
 * Transforms an XML processing instruction ({@code <?target data?>}) into a
 * {@link BLangXMLProcInsLiteral}. Empty data produces a single empty-literal
 * fragment; the target may be a simple or qualified XML name.
 */
@Override
public BLangNode transform(XMLProcessingInstruction xmlProcessingInstruction) {
    BLangXMLProcInsLiteral xmlProcInsLiteral =
            (BLangXMLProcInsLiteral) TreeBuilder.createXMLProcessingIntsructionLiteralNode();
    if (xmlProcessingInstruction.data().isEmpty()) {
        // No data: represent it as one empty literal fragment.
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = getPosition(xmlProcessingInstruction);
        xmlProcInsLiteral.dataFragments.add(emptyLiteral);
    } else {
        for (Node dataNode : xmlProcessingInstruction.data()) {
            xmlProcInsLiteral.dataFragments.add(createExpression(dataNode));
        }
    }
    XMLNameNode target = xmlProcessingInstruction.target();
    if (target.kind() == SyntaxKind.XML_SIMPLE_NAME) {
        xmlProcInsLiteral.target = createSimpleLiteral(((XMLSimpleNameNode) target).name());
    } else {
        // Qualified name target: only the prefix token is used as the literal target.
        xmlProcInsLiteral.target = createSimpleLiteral(((XMLQualifiedNameNode) target).prefix());
    }
    xmlProcInsLiteral.pos = getPosition(xmlProcessingInstruction);
    return xmlProcInsLiteral;
}
/**
 * Transforms an XML comment ({@code <!-- ... -->}) into a {@link BLangXMLCommentLiteral}.
 * Empty content produces a single empty-literal fragment.
 */
@Override
public BLangNode transform(XMLComment xmlComment) {
    BLangXMLCommentLiteral xmlCommentLiteral = (BLangXMLCommentLiteral) TreeBuilder.createXMLCommentLiteralNode();
    Location pos = getPosition(xmlComment);
    if (xmlComment.content().isEmpty()) {
        // No content: represent it as one empty literal fragment.
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = pos;
        xmlCommentLiteral.textFragments.add(emptyLiteral);
    } else {
        for (Node commentNode : xmlComment.content()) {
            xmlCommentLiteral.textFragments.add(createExpression(commentNode));
        }
    }
    xmlCommentLiteral.pos = pos;
    return xmlCommentLiteral;
}
/**
 * Transforms an XML element into a {@link BLangXMLElementLiteral}: start/end tag
 * names, child content (text children become simple literals) and the start tag's
 * attributes. The element is marked as a root element.
 */
@Override
public BLangNode transform(XMLElementNode xmlElementNode) {
    BLangXMLElementLiteral xmlElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    xmlElement.startTagName = createExpression(xmlElementNode.startTag());
    xmlElement.endTagName = createExpression(xmlElementNode.endTag());
    for (Node node : xmlElementNode.content()) {
        if (node.kind() == SyntaxKind.XML_TEXT) {
            // Plain text children are captured as simple literals, not full expressions.
            xmlElement.children.add(createSimpleLiteral(((XMLTextNode) node).content()));
            continue;
        }
        xmlElement.children.add(createExpression(node));
    }
    for (XMLAttributeNode attribute : xmlElementNode.startTag().attributes()) {
        xmlElement.attributes.add((BLangXMLAttribute) attribute.apply(this));
    }
    xmlElement.pos = getPosition(xmlElementNode);
    xmlElement.isRoot = true;
    return xmlElement;
}
/**
 * Transforms an XML attribute ({@code name="value"}) into a {@link BLangXMLAttribute}
 * with its quoted-string value and name expression.
 */
@Override
public BLangNode transform(XMLAttributeNode xmlAttributeNode) {
    BLangXMLAttribute attribute = (BLangXMLAttribute) TreeBuilder.createXMLAttributeNode();
    attribute.value = (BLangXMLQuotedString) xmlAttributeNode.value().apply(this);
    attribute.name = createExpression(xmlAttributeNode.attributeName());
    attribute.pos = getPosition(xmlAttributeNode);
    return attribute;
}
/**
 * Transforms a byte-array literal (e.g. {@code base16 `...`}) into a
 * {@link BLangLiteral} of byte-array type.
 */
@Override
public BLangNode transform(ByteArrayLiteralNode byteArrayLiteralNode) {
    BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
    literal.pos = getPosition(byteArrayLiteralNode);
    literal.type = symTable.getTypeFromTag(TypeTags.BYTE_ARRAY);
    // NOTE(review): this tag re-assignment looks redundant if getTypeFromTag(BYTE_ARRAY)
    // already returns a type tagged BYTE_ARRAY - confirm before removing.
    literal.type.tag = TypeTags.BYTE_ARRAY;
    literal.value = getValueFromByteArrayNode(byteArrayLiteralNode);
    literal.originalValue = String.valueOf(literal.value);
    return literal;
}
/**
 * Transforms an XML attribute value into a {@link BLangXMLQuotedString}, recording the
 * quote type. Empty values yield a single empty-literal fragment; a value that is a
 * lone interpolation additionally gets a trailing empty literal appended (presumably
 * to keep the fragment list well-formed for a pure-interpolation value - confirm).
 * Plain text content is XML-unescaped before being captured as a string literal.
 */
@Override
public BLangNode transform(XMLAttributeValue xmlAttributeValue) {
    BLangXMLQuotedString quotedString = (BLangXMLQuotedString) TreeBuilder.createXMLQuotedStringNode();
    quotedString.pos = getPosition(xmlAttributeValue);
    if (xmlAttributeValue.startQuote().kind() == SyntaxKind.SINGLE_QUOTE_TOKEN) {
        quotedString.quoteType = QuoteType.SINGLE_QUOTE;
    } else {
        quotedString.quoteType = QuoteType.DOUBLE_QUOTE;
    }
    if (xmlAttributeValue.value().isEmpty()) {
        // Empty attribute value: one empty literal fragment.
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = getPosition(xmlAttributeValue);
        quotedString.textFragments.add(emptyLiteral);
    } else if (xmlAttributeValue.value().size() == 1 &&
            xmlAttributeValue.value().get(0).kind() == SyntaxKind.INTERPOLATION) {
        // Single interpolation: the interpolated expression plus a trailing empty literal.
        quotedString.textFragments.add(createExpression(xmlAttributeValue.value().get(0)));
        BLangLiteral emptyLiteral = createEmptyLiteral();
        emptyLiteral.pos = getPosition(xmlAttributeValue);
        quotedString.textFragments.add(emptyLiteral);
    } else {
        for (Node value : xmlAttributeValue.value()) {
            if (value.kind() == SyntaxKind.XML_TEXT_CONTENT) {
                // Unescape XML entities (&amp; etc.) in raw text fragments.
                Token token = (Token) value;
                String normalizedValue = XmlFactory.XMLTextUnescape.unescape(token.text());
                quotedString.textFragments.add(createStringLiteral(normalizedValue, getPosition(value)));
            } else {
                quotedString.textFragments.add(createExpression(value));
            }
        }
    }
    return quotedString;
}
/**
 * Transforms an XML start tag by delegating to its name node.
 */
@Override
public BLangNode transform(XMLStartTagNode startTagNode) {
    return startTagNode.name().apply(this);
}
/**
 * Transforms an XML end tag by delegating to its name node.
 */
@Override
public BLangNode transform(XMLEndTagNode endTagNode) {
    return endTagNode.name().apply(this);
}
/**
 * Transforms an XML text node by converting its content token to an expression.
 */
@Override
public BLangNode transform(XMLTextNode xmlTextNode) {
    return createExpression(xmlTextNode.content());
}
/**
 * Builds the XML text literal for an empty {@code xml ``} template: a single
 * empty-string fragment positioned at the template expression.
 */
private BLangNode createXMLEmptyLiteral(TemplateExpressionNode expressionNode) {
    BLangXMLTextLiteral emptyXmlText = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    emptyXmlText.pos = getPosition(expressionNode);
    emptyXmlText.textFragments.add(createEmptyStringLiteral(emptyXmlText.pos));
    return emptyXmlText;
}
/**
 * Builds an XML text literal from a list of content nodes, positioned at the first
 * node. A trailing empty-string fragment is appended after the content fragments.
 */
private BLangNode createXMLTextLiteral(List<Node> expressionNode) {
    BLangXMLTextLiteral xmlTextLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    xmlTextLiteral.pos = getPosition(expressionNode.get(0));
    for (Node node : expressionNode) {
        xmlTextLiteral.textFragments.add(createExpression(node));
    }
    // Always terminate the fragment list with an empty string literal.
    xmlTextLiteral.textFragments.add(createEmptyStringLiteral(xmlTextLiteral.pos));
    return xmlTextLiteral;
}
/**
 * Builds an XML text literal from a single content node.
 */
private BLangNode createXMLTextLiteral(Node expressionNode) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.pos = getPosition(expressionNode);
    textLiteral.textFragments.add(createExpression(expressionNode));
    return textLiteral;
}
/**
 * Transforms a local {@code xmlns} declaration into a {@link BLangXMLNSStatement}
 * wrapping the {@link BLangXMLNS} node. A missing prefix yields an identifier
 * created from {@code null}.
 */
@Override
public BLangNode transform(XMLNamespaceDeclarationNode xmlnsDeclNode) {
    BLangXMLNS xmlns = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    BLangIdentifier prefixIdentifier = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null));
    BLangExpression namespaceUri = createExpression(xmlnsDeclNode.namespaceuri());
    xmlns.namespaceURI = namespaceUri;
    xmlns.prefix = prefixIdentifier;
    xmlns.pos = getPosition(xmlnsDeclNode);
    // Local declarations are statements, so wrap the xmlns node in a statement node.
    BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    xmlnsStmt.xmlnsDecl = xmlns;
    xmlnsStmt.pos = getPosition(xmlnsDeclNode);
    return xmlnsStmt;
}
/**
 * Transforms a module-level {@code xmlns} declaration into a bare {@link BLangXMLNS}
 * node (no statement wrapper, unlike local declarations).
 */
@Override
public BLangNode transform(ModuleXMLNamespaceDeclarationNode xmlnsDeclNode) {
    BLangXMLNS xmlnsNode = (BLangXMLNS) TreeBuilder.createXMLNSNode();
    xmlnsNode.prefix = createIdentifier(xmlnsDeclNode.namespacePrefix().orElse(null));
    xmlnsNode.namespaceURI = createExpression(xmlnsDeclNode.namespaceuri());
    xmlnsNode.pos = getPosition(xmlnsDeclNode);
    return xmlnsNode;
}
/**
 * Transforms a qualified XML name ({@code prefix:name}) into a {@link BLangXMLQName}
 * carrying both local name and prefix identifiers.
 */
@Override
public BLangNode transform(XMLQualifiedNameNode xmlQualifiedNameNode) {
    BLangXMLQName xmlName = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    xmlName.localname = createIdentifier(getPosition(xmlQualifiedNameNode.name()),
            xmlQualifiedNameNode.name().name());
    xmlName.prefix = createIdentifier(getPosition(xmlQualifiedNameNode.prefix()),
            xmlQualifiedNameNode.prefix().name());
    xmlName.pos = getPosition(xmlQualifiedNameNode);
    return xmlName;
}
/**
 * Transforms an unqualified XML name into a {@link BLangXMLQName} with an empty prefix.
 */
@Override
public BLangNode transform(XMLSimpleNameNode xmlSimpleNameNode) {
    BLangXMLQName qname = (BLangXMLQName) TreeBuilder.createXMLQNameNode();
    qname.localname = createIdentifier(xmlSimpleNameNode.name());
    // Simple names have no prefix; use an empty identifier.
    qname.prefix = createIdentifier(null, "");
    qname.pos = getPosition(xmlSimpleNameNode);
    return qname;
}
/**
 * Transforms a self-closing XML element ({@code <name ... />}) into a
 * {@link BLangXMLElementLiteral} with a start tag name and attributes but no children.
 */
@Override
public BLangNode transform(XMLEmptyElementNode xMLEmptyElementNode) {
    BLangXMLElementLiteral emptyElement = (BLangXMLElementLiteral) TreeBuilder.createXMLElementLiteralNode();
    emptyElement.startTagName = createExpression(xMLEmptyElementNode.name());
    for (XMLAttributeNode attributeNode : xMLEmptyElementNode.attributes()) {
        emptyElement.attributes.add((BLangXMLAttribute) attributeNode.apply(this));
    }
    emptyElement.pos = getPosition(xMLEmptyElementNode);
    return emptyElement;
}
/**
 * Transforms a remote method call action ({@code client->method(args)}) into a
 * {@link BLangInvocation.BLangActionInvocation} on the transformed client expression.
 */
@Override
public BLangNode transform(RemoteMethodCallActionNode remoteMethodCallActionNode) {
    BLangInvocation.BLangActionInvocation bLangActionInvocation = (BLangInvocation.BLangActionInvocation)
            TreeBuilder.createActionInvocation();
    bLangActionInvocation.expr = createExpression(remoteMethodCallActionNode.expression());
    bLangActionInvocation.argExprs = applyAll(remoteMethodCallActionNode.arguments());
    BLangNameReference nameReference = createBLangNameReference(remoteMethodCallActionNode.methodName().name());
    bLangActionInvocation.name = (BLangIdentifier) nameReference.name;
    bLangActionInvocation.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
    bLangActionInvocation.pos = getPosition(remoteMethodCallActionNode);
    return bLangActionInvocation;
}
/**
 * Transforms a stream type descriptor ({@code stream<T, E>}) into a
 * {@link BLangStreamType}. Without type parameters the value constraint defaults to
 * {@code any}; the error/completion type is set only when a right type param exists.
 */
@Override
public BLangNode transform(StreamTypeDescriptorNode streamTypeDescriptorNode) {
    BLangType constraint, error = null;
    Location pos = getPosition(streamTypeDescriptorNode);
    Optional<Node> paramsNode = streamTypeDescriptorNode.streamTypeParamsNode();
    boolean hasConstraint = paramsNode.isPresent();
    if (!hasConstraint) {
        // Bare `stream` type: constraint defaults to `any`.
        constraint = addValueType(pos, TypeKind.ANY);
    } else {
        StreamTypeParamsNode params = (StreamTypeParamsNode) paramsNode.get();
        if (params.rightTypeDescNode().isPresent()) {
            error = createTypeNode(params.rightTypeDescNode().get());
        }
        constraint = createTypeNode(params.leftTypeDescNode());
    }
    BLangBuiltInRefTypeNode refType = (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
    refType.typeKind = TypeKind.STREAM;
    refType.pos = pos;
    BLangStreamType streamType = (BLangStreamType) TreeBuilder.createStreamTypeNode();
    streamType.type = refType;
    streamType.constraint = constraint;
    streamType.error = error;
    streamType.pos = pos;
    return streamType;
}
/**
 * Transforms an array type descriptor (e.g. {@code int[2][]}) into a single
 * {@link BLangArrayType}, flattening nested array descriptors into one node with a
 * dimension count and a per-dimension size expression.
 */
@Override
public BLangNode transform(ArrayTypeDescriptorNode arrayTypeDescriptorNode) {
    int dimensions = 1;
    List<BLangExpression> sizes = new ArrayList<>();
    Location position = getPosition(arrayTypeDescriptorNode);
    while (true) {
        if (arrayTypeDescriptorNode.arrayLength().isEmpty()) {
            // `T[]` - open (unsized) dimension.
            sizes.add(new BLangLiteral(OPEN_ARRAY_INDICATOR, symTable.intType));
        } else {
            Node keyExpr = arrayTypeDescriptorNode.arrayLength().get();
            if (keyExpr.kind() == SyntaxKind.NUMERIC_LITERAL) {
                Token literalToken = ((BasicLiteralNode) keyExpr).literalToken();
                if (literalToken.kind() == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) {
                    sizes.add(new BLangLiteral(Integer.parseInt(literalToken.text()), symTable.intType));
                } else {
                    // Hex integer literal: the token lexeme carries the `0x`/`0X` prefix,
                    // which Integer.parseInt(..., 16) rejects - strip it before parsing.
                    String hexText = literalToken.text();
                    if (hexText.startsWith("0x") || hexText.startsWith("0X")) {
                        hexText = hexText.substring(2);
                    }
                    sizes.add(new BLangLiteral(Integer.parseInt(hexText, 16), symTable.intType));
                }
            } else if (keyExpr.kind() == SyntaxKind.ASTERISK_LITERAL) {
                // `T[*]` - size inferred from the initializer.
                sizes.add(new BLangLiteral(INFERRED_ARRAY_INDICATOR, symTable.intType));
            } else {
                // Constant-reference size expression.
                sizes.add(createExpression(keyExpr));
            }
        }
        // Walk inward through nested array descriptors, counting dimensions.
        if (arrayTypeDescriptorNode.memberTypeDesc().kind() != SyntaxKind.ARRAY_TYPE_DESC) {
            break;
        }
        arrayTypeDescriptorNode = (ArrayTypeDescriptorNode) arrayTypeDescriptorNode.memberTypeDesc();
        dimensions++;
    }
    BLangArrayType arrayTypeNode = (BLangArrayType) TreeBuilder.createArrayTypeNode();
    arrayTypeNode.pos = position;
    arrayTypeNode.elemtype = createTypeNode(arrayTypeDescriptorNode.memberTypeDesc());
    arrayTypeNode.dimensions = dimensions;
    arrayTypeNode.sizes = sizes.toArray(new BLangExpression[0]);
    return arrayTypeNode;
}
/**
 * Transforms an enum declaration: each (non-missing) enum member becomes a
 * module-level constant added to the top-level node list, and the enum itself
 * becomes a type definition over the union of its member types.
 */
@Override
public BLangNode transform(EnumDeclarationNode enumDeclarationNode) {
    // A primitive boolean suffices for a local flag; avoid needless Boolean boxing.
    boolean publicQualifier = enumDeclarationNode.qualifier().isPresent()
            && enumDeclarationNode.qualifier().get().kind() == SyntaxKind.PUBLIC_KEYWORD;
    for (Node member : enumDeclarationNode.enumMemberList()) {
        EnumMemberNode enumMember = (EnumMemberNode) member;
        if (enumMember.identifier().isMissing()) {
            // Skip members whose identifier the parser could not recover.
            continue;
        }
        addToTop(transformEnumMember(enumMember, publicQualifier));
    }
    BLangTypeDefinition bLangTypeDefinition = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    if (publicQualifier) {
        bLangTypeDefinition.flagSet.add(Flag.PUBLIC);
    }
    bLangTypeDefinition.flagSet.add(Flag.ENUM);
    bLangTypeDefinition.setName((BLangIdentifier) transform(enumDeclarationNode.identifier()));
    bLangTypeDefinition.pos = getPosition(enumDeclarationNode);
    BLangUnionTypeNode bLangUnionTypeNode = (BLangUnionTypeNode) TreeBuilder.createUnionTypeNode();
    bLangUnionTypeNode.pos = bLangTypeDefinition.pos;
    for (Node member : enumDeclarationNode.enumMemberList()) {
        Node enumMemberIdentifier = ((EnumMemberNode) member).identifier();
        if (enumMemberIdentifier.isMissing()) {
            continue;
        }
        bLangUnionTypeNode.memberTypeNodes.add(createTypeNode(enumMemberIdentifier));
    }
    // NOTE(review): members are collected in declaration order and then reversed -
    // presumably downstream expects this ordering; confirm before changing.
    Collections.reverse(bLangUnionTypeNode.memberTypeNodes);
    bLangTypeDefinition.setTypeNode(bLangUnionTypeNode);
    bLangTypeDefinition.annAttachments = applyAll(getAnnotations(enumDeclarationNode.metadata()));
    bLangTypeDefinition.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(enumDeclarationNode.metadata()));
    return bLangTypeDefinition;
}
/**
 * Transforms a single enum member into a {@link BLangConstant} of static type string.
 * The member's value expression (or its identifier, when no explicit value is given)
 * is created twice: once as the constant's initial expression and once ("deepLiteral")
 * for the associated finite type definition.
 *
 * @param member          the enum member syntax node
 * @param publicQualifier whether the enclosing enum is public (propagated to the constant)
 */
public BLangConstant transformEnumMember(EnumMemberNode member, Boolean publicQualifier) {
    BLangConstant bLangConstant = (BLangConstant) TreeBuilder.createConstantNode();
    bLangConstant.pos = getPosition(member);
    bLangConstant.flagSet.add(Flag.CONSTANT);
    bLangConstant.flagSet.add(Flag.ENUM_MEMBER);
    if (publicQualifier) {
        bLangConstant.flagSet.add(Flag.PUBLIC);
    }
    bLangConstant.annAttachments = applyAll(getAnnotations(member.metadata()));
    bLangConstant.markdownDocumentationAttachment =
            createMarkdownDocumentationAttachment(getDocumentationString(member.metadata()));
    bLangConstant.setName((BLangIdentifier) transform(member.identifier()));
    BLangExpression deepLiteral;
    if (member.constExprNode().isPresent()) {
        // Explicit value: `MEMBER = expr`.
        BLangExpression expression = createExpression(member.constExprNode().orElse(null));
        bLangConstant.setInitialExpression(expression);
        deepLiteral = createExpression(member.constExprNode().orElse(null));
    } else {
        // No explicit value: the member name itself is the string value.
        BLangLiteral literal = createSimpleLiteral(member.identifier());
        bLangConstant.setInitialExpression(literal);
        deepLiteral = createSimpleLiteral(member.identifier());
    }
    // Enum members are always typed as string constants.
    BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    typeNode.pos = symTable.builtinPos;
    typeNode.typeKind = TypeKind.STRING;
    bLangConstant.setTypeNode(typeNode);
    if (deepLiteral instanceof BLangLiteral) {
        BLangLiteral literal = (BLangLiteral) deepLiteral;
        if (!literal.originalValue.equals("")) {
            BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
            // originalValue is cleared before the literal is reused as a finite-type value.
            literal.originalValue = null;
            typeNodeAssociated.addValue(deepLiteral);
            bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
        } else {
            // Empty original value: no associated finite type is generated.
            bLangConstant.associatedTypeDefinition = null;
        }
    } else {
        BLangFiniteTypeNode typeNodeAssociated = (BLangFiniteTypeNode) TreeBuilder.createFiniteTypeNode();
        typeNodeAssociated.addValue(deepLiteral);
        bLangConstant.associatedTypeDefinition = createTypeDefinitionWithTypeNode(typeNodeAssociated);
    }
    return bLangConstant;
}
/**
 * Transforms a query expression into a {@link BLangQueryExpr}: the from clause,
 * intermediate clauses, select clause and optional on-conflict clause are appended
 * in source order. A `table`/`stream` construct-type keyword sets the corresponding
 * flag, and any key-specifier field names are recorded.
 */
@Override
public BLangNode transform(QueryExpressionNode queryExprNode) {
    BLangQueryExpr queryExpr = (BLangQueryExpr) TreeBuilder.createQueryExpressionNode();
    queryExpr.pos = getPosition(queryExprNode);
    BLangFromClause fromClause = (BLangFromClause) queryExprNode.queryPipeline().fromClause().apply(this);
    queryExpr.queryClauseList.add(fromClause);
    for (Node clauseNode : queryExprNode.queryPipeline().intermediateClauses()) {
        queryExpr.queryClauseList.add(clauseNode.apply(this));
    }
    BLangSelectClause selectClause = (BLangSelectClause) queryExprNode.selectClause().apply(this);
    queryExpr.queryClauseList.add(selectClause);
    Optional<OnConflictClauseNode> onConflict = queryExprNode.onConflictClause();
    onConflict.ifPresent(onConflictClauseNode -> queryExpr.queryClauseList.add(onConflictClauseNode.apply(this)));
    boolean isTable = false;
    boolean isStream = false;
    Optional<QueryConstructTypeNode> optionalQueryConstructTypeNode = queryExprNode.queryConstructType();
    if (optionalQueryConstructTypeNode.isPresent()) {
        // `table ... from ...` / `stream ... from ...` construct types.
        QueryConstructTypeNode queryConstructTypeNode = optionalQueryConstructTypeNode.get();
        isTable = queryConstructTypeNode.keyword().kind() == SyntaxKind.TABLE_KEYWORD;
        isStream = queryConstructTypeNode.keyword().kind() == SyntaxKind.STREAM_KEYWORD;
        if (queryConstructTypeNode.keySpecifier().isPresent()) {
            for (IdentifierToken fieldNameNode : queryConstructTypeNode.keySpecifier().get().fieldNames()) {
                queryExpr.fieldNameIdentifierList.add(createIdentifier(getPosition(fieldNameNode), fieldNameNode));
            }
        }
    }
    queryExpr.isStream = isStream;
    queryExpr.isTable = isTable;
    return queryExpr;
}
/**
 * Transforms an on-fail clause into a {@link BLangOnFailClause}. The captured error
 * variable is wrapped in a variable definition, marked FINAL, and flagged as
 * var-declared when the clause uses {@code var} instead of an explicit type.
 */
public BLangNode transform(OnFailClauseNode onFailClauseNode) {
    Location pos = getPosition(onFailClauseNode);
    BLangSimpleVariableDef variableDefinitionNode = (BLangSimpleVariableDef) TreeBuilder.
            createSimpleVariableDefinitionNode();
    BLangSimpleVariable var = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    boolean isDeclaredWithVar = onFailClauseNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    var.isDeclaredWithVar = isDeclaredWithVar;
    if (!isDeclaredWithVar) {
        var.setTypeNode(createTypeNode(onFailClauseNode.typeDescriptor()));
    }
    var.pos = getPosition(onFailClauseNode);
    var.setName(this.createIdentifier(onFailClauseNode.failErrorName()));
    var.name.pos = getPosition(onFailClauseNode.failErrorName());
    variableDefinitionNode.setVariable(var);
    variableDefinitionNode.pos = var.name.pos;
    BLangOnFailClause onFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    onFailClause.pos = pos;
    onFailClause.isDeclaredWithVar = isDeclaredWithVar;
    // The captured error variable is implicitly final inside the on-fail body.
    markVariableWithFlag(variableDefinitionNode.getVariable(), Flag.FINAL);
    onFailClause.variableDefinitionNode = variableDefinitionNode;
    BLangBlockStmt blockNode = (BLangBlockStmt) transform(onFailClauseNode.blockStatement());
    blockNode.pos = getPosition(onFailClauseNode);
    onFailClause.body = blockNode;
    return onFailClause;
}
/**
 * Transforms a let clause into a {@link BLangLetClause}. Every let variable is
 * marked FINAL; the declaration list on the clause is only set when non-empty.
 */
@Override
public BLangNode transform(LetClauseNode letClauseNode) {
    BLangLetClause letClause = (BLangLetClause) TreeBuilder.createLetClauseNode();
    letClause.pos = getPosition(letClauseNode);
    List<BLangLetVariable> declarations = new ArrayList<>();
    for (LetVariableDeclarationNode declNode : letClauseNode.letVarDeclarations()) {
        BLangLetVariable letVariable = createLetVariable(declNode);
        letVariable.definitionNode.getVariable().addFlag(Flag.FINAL);
        declarations.add(letVariable);
    }
    if (!declarations.isEmpty()) {
        letClause.letVarDeclarations = declarations;
    }
    return letClause;
}
/**
 * Transforms a from clause into a {@link BLangFromClause}: the iterated collection
 * expression plus a variable definition built from the typed binding pattern.
 * The clause is flagged var-declared when the pattern uses {@code var}.
 */
@Override
public BLangNode transform(FromClauseNode fromClauseNode) {
    BLangFromClause fromClause = (BLangFromClause) TreeBuilder.createFromClauseNode();
    fromClause.pos = getPosition(fromClauseNode);
    fromClause.collection = createExpression(fromClauseNode.expression());
    TypedBindingPatternNode bindingPatternNode = fromClauseNode.typedBindingPattern();
    fromClause.variableDefinitionNode = createBLangVarDef(getPosition(bindingPatternNode), bindingPatternNode,
            Optional.empty(), Optional.empty());
    boolean isDeclaredWithVar = bindingPatternNode.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    fromClause.isDeclaredWithVar = isDeclaredWithVar;
    return fromClause;
}
/**
 * Transforms a where clause into a {@link BLangWhereClause} with its filter expression.
 */
@Override
public BLangNode transform(WhereClauseNode whereClauseNode) {
    BLangWhereClause clause = (BLangWhereClause) TreeBuilder.createWhereClauseNode();
    clause.pos = getPosition(whereClauseNode);
    clause.expression = createExpression(whereClauseNode.expression());
    return clause;
}
/**
 * Transforms a select clause into a {@link BLangSelectClause} with its projection expression.
 */
@Override
public BLangNode transform(SelectClauseNode selectClauseNode) {
    BLangSelectClause clause = (BLangSelectClause) TreeBuilder.createSelectClauseNode();
    clause.pos = getPosition(selectClauseNode);
    clause.expression = createExpression(selectClauseNode.expression());
    return clause;
}
/**
 * Transforms an on-conflict clause into a {@link BLangOnConflictClause}
 * with its conflict-handler expression.
 */
@Override
public BLangNode transform(OnConflictClauseNode onConflictClauseNode) {
    BLangOnConflictClause clause = (BLangOnConflictClause) TreeBuilder.createOnConflictClauseNode();
    clause.pos = getPosition(onConflictClauseNode);
    clause.expression = createExpression(onConflictClauseNode.expression());
    return clause;
}
/**
 * Transforms a limit clause into a {@link BLangLimitClause} with its count expression.
 */
@Override
public BLangNode transform(LimitClauseNode limitClauseNode) {
    // Local renamed from the misleading `selectClause` in the earlier revision.
    BLangLimitClause limitClause = (BLangLimitClause) TreeBuilder.createLimitClauseNode();
    limitClause.pos = getPosition(limitClauseNode);
    limitClause.expression = createExpression(limitClauseNode.expression());
    return limitClause;
}
/**
 * Transforms a join on-condition into a {@link BLangOnClause} with its
 * left-hand and right-hand key expressions.
 */
@Override
public BLangNode transform(OnClauseNode onClauseNode) {
    BLangOnClause clause = (BLangOnClause) TreeBuilder.createOnClauseNode();
    clause.pos = getPosition(onClauseNode);
    clause.lhsExpr = createExpression(onClauseNode.lhsExpression());
    clause.rhsExpr = createExpression(onClauseNode.rhsExpression());
    return clause;
}
/**
 * Transforms a join clause into a {@link BLangJoinClause}: the joined collection,
 * the binding-pattern variable definition, outer-join and var-declared flags, and
 * the on-condition (built inline so the equals-keyword position can be recorded).
 */
@Override
public BLangNode transform(JoinClauseNode joinClauseNode) {
    BLangJoinClause joinClause = (BLangJoinClause) TreeBuilder.createJoinClauseNode();
    joinClause.pos = getPosition(joinClauseNode);
    TypedBindingPatternNode typedBindingPattern = joinClauseNode.typedBindingPattern();
    joinClause.variableDefinitionNode = createBLangVarDef(getPosition(joinClauseNode),
            typedBindingPattern, Optional.empty(), Optional.empty());
    joinClause.collection = createExpression(joinClauseNode.expression());
    joinClause.isDeclaredWithVar = typedBindingPattern.typeDescriptor().kind() == SyntaxKind.VAR_TYPE_DESC;
    joinClause.isOuterJoin = joinClauseNode.outerKeyword().isPresent();
    OnClauseNode onClauseNode = joinClauseNode.joinOnCondition();
    // Built here rather than via transform(OnClauseNode) so the `equals` keyword position
    // can be attached when the token was actually present in the source.
    BLangOnClause onClause = (BLangOnClause) TreeBuilder.createOnClauseNode();
    onClause.pos = getPosition(onClauseNode);
    if (!onClauseNode.equalsKeyword().isMissing()) {
        onClause.equalsKeywordPos = getPosition(onClauseNode.equalsKeyword());
    }
    onClause.lhsExpr = createExpression(onClauseNode.lhsExpression());
    onClause.rhsExpr = createExpression(onClauseNode.rhsExpression());
    joinClause.onClause = onClause;
    return joinClause;
}
/**
 * Transforms an order-by clause into a {@link BLangOrderByClause},
 * adding one order key per source order-key node.
 */
@Override
public BLangNode transform(OrderByClauseNode orderByClauseNode) {
    BLangOrderByClause clause = (BLangOrderByClause) TreeBuilder.createOrderByClauseNode();
    clause.pos = getPosition(orderByClauseNode);
    orderByClauseNode.orderKey().forEach(keyNode -> clause.addOrderKey(createOrderKey(keyNode)));
    return clause;
}
/**
 * Builds a {@link BLangOrderKey} from an order-key node. The key is ascending
 * unless an explicit {@code descending} direction keyword is present.
 */
public BLangOrderKey createOrderKey(OrderKeyNode orderKeyNode) {
    BLangOrderKey orderKey = (BLangOrderKey) TreeBuilder.createOrderKeyNode();
    orderKey.pos = getPosition(orderKeyNode);
    orderKey.expression = createExpression(orderKeyNode.expression());
    orderKey.isAscending = orderKeyNode.orderDirection()
            .map(direction -> !direction.text().equals("descending"))
            .orElse(true);
    return orderKey;
}
/**
 * Transforms an intersection type descriptor ({@code A & B}) into a
 * {@link BLangIntersectionTypeNode}. If either side is already an intersection node,
 * the other side is merged into it so chained intersections flatten into one node
 * (left constituents prepended, right constituents appended).
 */
@Override
public BLangNode transform(IntersectionTypeDescriptorNode intersectionTypeDescriptorNode) {
    BLangType lhsType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.leftTypeDesc());
    BLangType rhsType = (BLangType) createTypeNode(intersectionTypeDescriptorNode.rightTypeDesc());
    BLangIntersectionTypeNode intersectionType;
    if (rhsType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
        // Merge into the right-hand intersection, keeping the left type first.
        intersectionType = (BLangIntersectionTypeNode) rhsType;
        intersectionType.constituentTypeNodes.add(0, lhsType);
    } else if (lhsType.getKind() == NodeKind.INTERSECTION_TYPE_NODE) {
        // Merge into the left-hand intersection, appending the right type.
        intersectionType = (BLangIntersectionTypeNode) lhsType;
        intersectionType.constituentTypeNodes.add(rhsType);
    } else {
        intersectionType = (BLangIntersectionTypeNode) TreeBuilder.createIntersectionTypeNode();
        intersectionType.constituentTypeNodes.add(lhsType);
        intersectionType.constituentTypeNodes.add(rhsType);
    }
    intersectionType.pos = getPosition(intersectionTypeDescriptorNode);
    return intersectionType;
}
/**
 * Transforms an inferred-typedesc default ({@code <>}) into a
 * {@link BLangInferredTypedescDefaultNode}.
 */
@Override
public BLangNode transform(InferredTypedescDefaultNode inferDefaultValueNode) {
    BLangInferredTypedescDefaultNode inferredDefault =
            (BLangInferredTypedescDefaultNode) TreeBuilder.createInferTypedescExpressionNode();
    inferredDefault.pos = getPosition(inferDefaultValueNode);
    return inferredDefault;
}
/**
 * Fallback for syntax node kinds without a dedicated transform overload.
 * Always throws, so an unhandled node kind surfaces as a hard failure.
 */
@Override
protected BLangNode transformSyntaxNode(Node node) {
    throw new RuntimeException("Node not supported: " + node.getClass().getSimpleName());
}
// Desugars a `service /path on listener { ... }` declaration into three cooperating pieces:
//   (1) an anonymous, module-level class definition holding the service body,
//   (2) a synthesized variable initialized via `new <anon-class>()`, and
//   (3) a BLangService node tying the class, variable, attach expressions and resource path together.
@Override
public BLangNode transform(ServiceDeclarationNode serviceDeclarationNode) {
Location pos = getPositionWithoutMetadata(serviceDeclarationNode);
BLangClassDefinition anonClassDef = transformObjectCtorExpressionBody(serviceDeclarationNode.members());
anonClassDef.isServiceDecl = true;
anonClassDef.pos = pos;
anonClassDef.flagSet.add(SERVICE);
setClassQualifiers(serviceDeclarationNode.qualifiers(), anonClassDef);
List<IdentifierNode> absResourcePathPath = new ArrayList<>();
NodeList<Node> pathList = serviceDeclarationNode.absoluteResourcePath();
BLangLiteral serviceNameLiteral = null;
// A single string-literal path (e.g. service "name" on ...) is a service *name*,
// not a resource path; otherwise collect the path identifiers, keeping a lone "/"
// but dropping separator slashes in multi-segment paths.
if (pathList.size() == 1 && pathList.get(0).kind() == SyntaxKind.STRING_LITERAL) {
serviceNameLiteral = (BLangLiteral) createExpression(pathList.get(0));
} else {
for (var token : pathList) {
String text = ((Token) token).text();
if (pathList.size() == 1 && text.equals("/")) {
absResourcePathPath.add(createIdentifier((Token) token));
} else if (!text.equals("/")) {
absResourcePathPath.add(createIdentifier((Token) token));
}
}
}
// Name the generated class with a fresh anonymous-type key and hoist it to module level.
String genName = anonymousModelHelper.getNextAnonymousTypeKey(packageID);
IdentifierNode anonTypeGenName = createIdentifier(pos, genName);
anonClassDef.setName(anonTypeGenName);
anonClassDef.flagSet.add(Flag.PUBLIC);
// An optional type descriptor (service class/object type) becomes a type reference of the class.
Optional<TypeDescriptorNode> typeReference = serviceDeclarationNode.typeDescriptor();
typeReference.ifPresent(typeReferenceNode -> {
BLangType typeNode = createTypeNode(typeReferenceNode);
anonClassDef.typeRefs.add(typeNode);
});
anonClassDef.annAttachments = applyAll(getAnnotations(serviceDeclarationNode.metadata()));
anonClassDef.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(serviceDeclarationNode.metadata()));
addToTop(anonClassDef);
// Build the `new <anon-class>()` initializer used to instantiate the service object.
BLangIdentifier identifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
BLangUserDefinedType userDefinedType = createUserDefinedType(pos, identifier, anonClassDef.name);
BLangTypeInit initNode = (BLangTypeInit) TreeBuilder.createInitNode();
initNode.pos = pos;
initNode.userDefinedType = userDefinedType;
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = pos;
BLangIdentifier pkgAlias = createIdentifier(pos, "");
BLangNameReference nameReference = new BLangNameReference(pos, null, pkgAlias, createIdentifier(pos, genName));
invocationNode.name = (BLangIdentifier) nameReference.name;
invocationNode.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
initNode.argsExpr.addAll(invocationNode.argExprs);
initNode.initInvocation = invocationNode;
BLangSimpleVariable serviceVariable = createServiceVariable(pos, anonClassDef, initNode);
// The expressions after `on` are the listeners the service attaches to.
List<BLangExpression> exprs = new ArrayList<>();
for (var exp : serviceDeclarationNode.expressions()) {
exprs.add(createExpression(exp));
}
BLangService service = (BLangService) TreeBuilder.createServiceNode();
service.serviceVariable = serviceVariable;
service.attachedExprs = exprs;
service.serviceClass = anonClassDef;
service.absoluteResourcePath = absResourcePathPath;
service.serviceNameLiteral = serviceNameLiteral;
service.annAttachments = anonClassDef.annAttachments;
service.pos = pos;
service.name = createIdentifier(pos, anonymousModelHelper.getNextAnonymousServiceVarKey(packageID));
return service;
}
/**
 * Creates the synthesized (compiler-internal) variable that holds the instance
 * of the generated service class, typed by the anonymous class and initialized
 * with the given {@code new} expression.
 */
private BLangSimpleVariable createServiceVariable(Location pos, BLangClassDefinition annonClassDef,
                                                  BLangTypeInit initNode) {
    BLangUserDefinedType serviceClassType = createUserDefinedType(pos,
            (BLangIdentifier) TreeBuilder.createIdentifierNode(), annonClassDef.name);
    BLangSimpleVariable serviceVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    serviceVar.typeNode = serviceClassType;
    // Fresh anonymous name — this variable never appears in user source.
    String varName = anonymousModelHelper.getNextAnonymousServiceVarKey(packageID);
    serviceVar.name = createIdentifier(pos, varName);
    serviceVar.expr = initNode;
    serviceVar.internal = true;
    return serviceVar;
}
// Transforms a class definition: metadata (annotations, docs), visibility and class
// qualifiers, then sorts each member into functions, fields, or type references.
// The first user-defined `init` becomes the class init function; any later `init`
// is added as an ordinary function (later phases report the duplicate).
@Override
public BLangNode transform(ClassDefinitionNode classDefinitionNode) {
BLangClassDefinition blangClass = (BLangClassDefinition) TreeBuilder.createClassDefNode();
blangClass.pos = getPositionWithoutMetadata(classDefinitionNode);
blangClass.annAttachments = applyAll(getAnnotations(classDefinitionNode.metadata()));
BLangIdentifier identifierNode = createIdentifier(classDefinitionNode.className());
blangClass.setName(identifierNode);
blangClass.markdownDocumentationAttachment =
createMarkdownDocumentationAttachment(getDocumentationString(classDefinitionNode.metadata()));
classDefinitionNode.visibilityQualifier().ifPresent(visibilityQual -> {
if (visibilityQual.kind() == SyntaxKind.PUBLIC_KEYWORD) {
blangClass.flagSet.add(Flag.PUBLIC);
}
});
setClassQualifiers(classDefinitionNode.classTypeQualifiers(), blangClass);
NodeList<Node> members = classDefinitionNode.members();
for (Node node : members) {
// Each member is transformed via the visitor and classified by resulting node kind.
BLangNode bLangNode = node.apply(this);
if (bLangNode.getKind() == NodeKind.FUNCTION || bLangNode.getKind() == NodeKind.RESOURCE_FUNC) {
BLangFunction bLangFunction = (BLangFunction) bLangNode;
bLangFunction.attachedFunction = true;
bLangFunction.flagSet.add(Flag.ATTACHED);
if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(bLangFunction.name.value)) {
// Only the first `init` is the class initializer; duplicates stay plain functions.
if (blangClass.initFunction == null) {
bLangFunction.objInitFunction = true;
blangClass.initFunction = bLangFunction;
} else {
blangClass.addFunction(bLangFunction);
}
} else {
blangClass.addFunction(bLangFunction);
}
} else if (bLangNode.getKind() == NodeKind.VARIABLE) {
blangClass.addField((BLangSimpleVariable) bLangNode);
} else if (bLangNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
// `*TypeRef;` inclusion member.
blangClass.addTypeReference((BLangType) bLangNode);
}
}
return blangClass;
}
/**
 * Transforms a retry statement. `retry transaction { ... }` becomes a dedicated
 * retry-transaction node; a plain `retry { ... }` becomes a retry node with an
 * optional on-fail clause.
 */
@Override
public BLangNode transform(RetryStatementNode retryStatementNode) {
    BLangRetrySpec spec = createRetrySpec(retryStatementNode);
    Location location = getPosition(retryStatementNode);
    StatementNode body = retryStatementNode.retryBody();
    if (body.kind() == SyntaxKind.TRANSACTION_STATEMENT) {
        BLangRetryTransaction retryTx = (BLangRetryTransaction) TreeBuilder.createRetryTransactionNode();
        retryTx.pos = location;
        retryTx.setRetrySpec(spec);
        retryTx.setTransaction((BLangTransaction) body.apply(this));
        return retryTx;
    }
    BLangRetry retry = (BLangRetry) TreeBuilder.createRetryNode();
    retry.pos = location;
    retry.setRetrySpec(spec);
    retry.setRetryBody((BLangBlockStmt) body.apply(this));
    retryStatementNode.onFailClause().ifPresent(onFailClauseNode -> retry.setOnFailClause(
            (org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this))));
    return retry;
}
/**
 * Builds the retry spec: optional retry-manager type parameter (`retry<M>`),
 * optional constructor arguments (`retry(3)`), and a position — falling back to
 * the whole statement's position when neither type nor args provide one.
 */
private BLangRetrySpec createRetrySpec(RetryStatementNode retryStatementNode) {
    BLangRetrySpec spec = (BLangRetrySpec) TreeBuilder.createRetrySpecNode();
    retryStatementNode.typeParameter().ifPresent(typeParam -> {
        spec.retryManagerType = createTypeNode(typeParam.typeNode());
        spec.pos = getPosition(typeParam);
    });
    retryStatementNode.arguments().ifPresent(argList -> {
        // Argument position wins over the type-parameter position, matching source order.
        spec.pos = getPosition(argList);
        for (Node argNode : argList.arguments()) {
            spec.argExprs.add(createExpression(argNode));
        }
    });
    if (spec.pos == null) {
        spec.pos = getPosition(retryStatementNode);
    }
    return spec;
}
/**
 * Transforms the `transactional` expression into its BLang node (position only).
 */
@Override
public BLangNode transform(TransactionalExpressionNode transactionalExpressionNode) {
    BLangTransactionalExpr transactional = TreeBuilder.createTransactionalExpressionNode();
    transactional.pos = getPosition(transactionalExpressionNode);
    return transactional;
}
/**
 * Transforms an XML filter expression (`x.<name-pattern>`) into an element-access
 * node carrying one filter per name pattern in the chain.
 */
@Override
public BLangNode transform(XMLFilterExpressionNode xmlFilterExpressionNode) {
    List<BLangXMLElementFilter> elementFilters = new ArrayList<>();
    XMLNamePatternChainingNode patternChain = xmlFilterExpressionNode.xmlPatternChain();
    for (Node namePattern : patternChain.xmlNamePattern()) {
        elementFilters.add(createXMLElementFilter(namePattern));
    }
    BLangExpression accessedExpr = createExpression(xmlFilterExpressionNode.expression());
    return new BLangXMLElementAccess(getPosition(xmlFilterExpressionNode), null,
            accessedExpr, elementFilters);
}
@Override
public BLangNode transform(XMLStepExpressionNode xmlStepExpressionNode) {
List<BLangXMLElementFilter> filters = new ArrayList<>();
int starCount = 0;
if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.SLASH_ASTERISK_TOKEN) {
starCount = 1;
} else if (xmlStepExpressionNode.xmlStepStart().kind() == SyntaxKind.XML_NAME_PATTERN_CHAIN) {
XMLNamePatternChainingNode xmlNamePatternChainingNode =
(XMLNamePatternChainingNode) xmlStepExpressionNode.xmlStepStart();
for (Node node : xmlNamePatternChainingNode.xmlNamePattern()) {
filters.add(createXMLElementFilter(node));
}
switch (xmlNamePatternChainingNode.startToken().kind()) {
case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
starCount = 2;
break;
case SLASH_ASTERISK_TOKEN:
starCount = 1;
break;
}
}
BLangExpression expr = createExpression(xmlStepExpressionNode.expression());
BLangXMLNavigationAccess xmlNavigationAccess =
new BLangXMLNavigationAccess(getPosition(xmlStepExpressionNode), null, expr, filters,
XMLNavigationAccess.NavAccessType.fromInt(starCount), null);
return xmlNavigationAccess;
}
// Transforms a match statement: the matched expression is shared by every clause
// and every pattern (each pattern records it, plus whether its clause has a guard),
// followed by an optional on-fail clause.
@Override
public BLangNode transform(MatchStatementNode matchStatementNode) {
BLangMatchStatement matchStatement = (BLangMatchStatement) TreeBuilder.createMatchStatementNode();
BLangExpression matchStmtExpr = createExpression(matchStatementNode.condition());
matchStatement.setExpression(matchStmtExpr);
for (MatchClauseNode matchClauseNode : matchStatementNode.matchClauses()) {
BLangMatchClause bLangMatchClause = (BLangMatchClause) TreeBuilder.createMatchClause();
bLangMatchClause.pos = getPosition(matchClauseNode);
bLangMatchClause.expr = matchStmtExpr;
boolean matchGuardAvailable = false;
// Optional `if <expr>` guard on the clause.
if (matchClauseNode.matchGuard().isPresent()) {
matchGuardAvailable = true;
BLangMatchGuard bLangMatchGuard = (BLangMatchGuard) TreeBuilder.createMatchGuard();
bLangMatchGuard.expr = createExpression(matchClauseNode.matchGuard().get().expression());
bLangMatchGuard.pos = getPosition(matchClauseNode.matchGuard().get());
bLangMatchClause.setMatchGuard(bLangMatchGuard);
}
for (Node matchPattern : matchClauseNode.matchPatterns()) {
BLangMatchPattern bLangMatchPattern = transformMatchPattern(matchPattern);
// transformMatchPattern may yield null for unsupported patterns — skip those.
if (bLangMatchPattern != null) {
bLangMatchPattern.matchExpr = matchStmtExpr;
bLangMatchPattern.matchGuardIsAvailable = matchGuardAvailable;
bLangMatchClause.addMatchPattern(bLangMatchPattern);
}
}
bLangMatchClause.setBlockStatement((BLangBlockStmt) transform(matchClauseNode.blockStatement()));
matchStatement.addMatchClause(bLangMatchClause);
}
matchStatementNode.onFailClause().ifPresent(onFailClauseNode -> {
matchStatement.setOnFailClause(
(org.ballerinalang.model.clauses.OnFailClauseNode) (onFailClauseNode.apply(this)));
});
matchStatement.pos = getPosition(matchStatementNode);
return matchStatement;
}
// Builds an XML sequence literal from template content, coalescing adjacent
// text/interpolation items into a single text literal. Runs of XML_TEXT /
// INTERPOLATION are buffered in adjacentTextNodes and flushed either when a
// structured item (element, comment, PI) appears or at the end of the content.
private BLangXMLSequenceLiteral createXmlSequence(TemplateExpressionNode expressionNode) {
BLangXMLSequenceLiteral xmlSequenceLiteral = (BLangXMLSequenceLiteral)
TreeBuilder.createXMLSequenceLiteralNode();
xmlSequenceLiteral.pos = getPosition(expressionNode);
Node lastNode = null;
List<Node> adjacentTextNodes = new ArrayList<>();
int xmlContentSize = expressionNode.content().size();
for (int index = 0; index < xmlContentSize; index++) {
Node childItem = expressionNode.content().get(index);
if (childItem.kind() == SyntaxKind.XML_TEXT || childItem.kind() == SyntaxKind.INTERPOLATION) {
// Buffer the text-like item; only flush mid-loop, not for the final item.
adjacentTextNodes.add(childItem);
lastNode = childItem;
if (index != xmlContentSize - 1) {
continue;
}
}
// Flush any buffered text run (reached on a structured item, or on the last item).
if (lastNode != null && (lastNode.kind() == SyntaxKind.XML_TEXT ||
lastNode.kind() == SyntaxKind.INTERPOLATION)) {
if (adjacentTextNodes.size() > 1) {
// Multiple adjacent text/interpolation items merge into one text literal.
xmlSequenceLiteral.xmlItems.add((BLangExpression) createXMLTextLiteral(adjacentTextNodes));
} else {
xmlSequenceLiteral.xmlItems.add(createXmlSingletonItem(lastNode));
}
adjacentTextNodes.clear();
// If the current item itself was text-like (i.e. the final item), it is already emitted.
if (lastNode.kind() == childItem.kind()) {
continue;
}
}
// Structured (non-text) item: emit it directly.
xmlSequenceLiteral.xmlItems.add(createXmlSingletonItem(childItem));
lastNode = childItem;
}
return xmlSequenceLiteral;
}
/**
 * Converts one XML content item to an expression. Structured items (comments,
 * processing instructions, elements) transform directly; anything else is text.
 */
public BLangExpression createXmlSingletonItem(Node xmlTypeNode) {
    SyntaxKind itemKind = xmlTypeNode.kind();
    if (itemKind == SyntaxKind.XML_COMMENT || itemKind == SyntaxKind.XML_PI
            || itemKind == SyntaxKind.XML_ELEMENT || itemKind == SyntaxKind.XML_EMPTY_ELEMENT) {
        return createExpression(xmlTypeNode);
    }
    return (BLangExpression) createXMLTextLiteral(xmlTypeNode);
}
/**
 * Creates the BLang node for an `xml` template literal: empty content becomes an
 * empty literal, a single item a singleton, and anything longer a sequence.
 */
public BLangNode createXmlTemplateLiteral(TemplateExpressionNode expressionNode) {
    int contentSize = expressionNode.content().size();
    if (contentSize == 0) {
        return createXMLEmptyLiteral(expressionNode);
    }
    if (contentSize == 1) {
        return createXmlSingletonItem(expressionNode.content().get(0));
    }
    return createXmlSequence(expressionNode);
}
// Dispatches one match-pattern syntax node to the corresponding BLang match pattern.
// A bare `_` (either a name reference or an identifier token) is the wildcard pattern;
// anything that falls through the structural cases is treated as a constant pattern.
private BLangMatchPattern transformMatchPattern(Node matchPattern) {
Location matchPatternPos = matchPattern.location();
SyntaxKind kind = matchPattern.kind();
// `_` written as a simple name reference.
if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE &&
((SimpleNameReferenceNode) matchPattern).name().text().equals("_")) {
BLangWildCardMatchPattern bLangWildCardMatchPattern =
(BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
bLangWildCardMatchPattern.pos = matchPatternPos;
return bLangWildCardMatchPattern;
}
// `_` appearing as a raw identifier token.
if (kind == SyntaxKind.IDENTIFIER_TOKEN && ((IdentifierToken) matchPattern).text().equals("_")) {
BLangWildCardMatchPattern bLangWildCardMatchPattern =
(BLangWildCardMatchPattern) TreeBuilder.createWildCardMatchPattern();
bLangWildCardMatchPattern.pos = matchPatternPos;
return bLangWildCardMatchPattern;
}
// `var x` style binding patterns.
if (kind == SyntaxKind.TYPED_BINDING_PATTERN) {
TypedBindingPatternNode typedBindingPatternNode = (TypedBindingPatternNode) matchPattern;
BLangVarBindingPatternMatchPattern bLangVarBindingPattern =
(BLangVarBindingPatternMatchPattern) TreeBuilder.createVarBindingPattern();
bLangVarBindingPattern.pos = matchPatternPos;
bLangVarBindingPattern.setBindingPattern(transformBindingPattern(typedBindingPatternNode.bindingPattern()));
return bLangVarBindingPattern;
}
if (kind == SyntaxKind.ERROR_MATCH_PATTERN) {
return transformErrorMatchPattern((ErrorMatchPatternNode) matchPattern, matchPatternPos);
}
if (kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN) {
return transformNamedArgMatchPattern((NamedArgMatchPatternNode) matchPattern, matchPatternPos);
}
if (kind == SyntaxKind.LIST_MATCH_PATTERN) {
return transformListMatchPattern((ListMatchPatternNode) matchPattern, matchPatternPos);
}
if (kind == SyntaxKind.REST_MATCH_PATTERN) {
return transformRestMatchPattern((RestMatchPatternNode) matchPattern, matchPatternPos);
}
if (kind == SyntaxKind.MAPPING_MATCH_PATTERN) {
return transformMappingMatchPattern((MappingMatchPatternNode) matchPattern, matchPatternPos);
}
if (kind == SyntaxKind.FIELD_MATCH_PATTERN) {
return transformFieldMatchPattern((FieldMatchPatternNode) matchPattern, matchPatternPos);
}
// Remaining kinds must be constant-expression patterns (checked when assertions are enabled).
assert (kind == SyntaxKind.NUMERIC_LITERAL ||
kind == SyntaxKind.STRING_LITERAL ||
kind == SyntaxKind.SIMPLE_NAME_REFERENCE ||
kind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
kind == SyntaxKind.IDENTIFIER_TOKEN ||
kind == SyntaxKind.NULL_LITERAL ||
kind == SyntaxKind.NIL_LITERAL ||
kind == SyntaxKind.BOOLEAN_LITERAL ||
kind == SyntaxKind.UNARY_EXPRESSION);
BLangConstPattern bLangConstMatchPattern = (BLangConstPattern) TreeBuilder.createConstMatchPattern();
bLangConstMatchPattern.setExpression(createExpression(matchPattern));
bLangConstMatchPattern.pos = matchPatternPos;
return bLangConstMatchPattern;
}
// Transforms error(<type>?, msg?, cause?, ...fields) into a BLang error match pattern.
// The arg list is positional: the first non-field arg is the message, a second
// non-field arg is the cause, and named-arg/rest patterns from that point on are
// error field patterns (see isErrorFieldMatchPattern).
private BLangErrorMatchPattern transformErrorMatchPattern(ErrorMatchPatternNode errorMatchPatternNode,
Location pos) {
BLangErrorMatchPattern bLangErrorMatchPattern =
(BLangErrorMatchPattern) TreeBuilder.createErrorMatchPattern();
bLangErrorMatchPattern.pos = pos;
NameReferenceNode nameReferenceNode;
// Optional error type reference: error MyError(...).
if (errorMatchPatternNode.typeReference().isPresent()) {
nameReferenceNode = errorMatchPatternNode.typeReference().get();
bLangErrorMatchPattern.errorTypeReference = (BLangUserDefinedType) createTypeNode(nameReferenceNode);
}
if (errorMatchPatternNode.argListMatchPatternNode().size() == 0) {
return bLangErrorMatchPattern;
}
// First arg: either the start of the field section, or the message pattern.
Node node = errorMatchPatternNode.argListMatchPatternNode().get(0);
if (isErrorFieldMatchPattern(node)) {
createErrorFieldMatchPatterns(0, errorMatchPatternNode, bLangErrorMatchPattern);
return bLangErrorMatchPattern;
}
bLangErrorMatchPattern.errorMessageMatchPattern = createErrorMessageMatchPattern(node);
if (errorMatchPatternNode.argListMatchPatternNode().size() == 1) {
return bLangErrorMatchPattern;
}
// Second arg: either the start of the field section, or the cause pattern.
node = errorMatchPatternNode.argListMatchPatternNode().get(1);
if (isErrorFieldMatchPattern(node)) {
createErrorFieldMatchPatterns(1, errorMatchPatternNode, bLangErrorMatchPattern);
return bLangErrorMatchPattern;
}
bLangErrorMatchPattern.errorCauseMatchPattern = createErrorCauseMatchPattern(node);
// Everything after message and cause is the field section.
createErrorFieldMatchPatterns(2, errorMatchPatternNode, bLangErrorMatchPattern);
return bLangErrorMatchPattern;
}
/**
 * Transforms a named-argument match pattern (`name = P`) by pairing the argument
 * name with the recursively-transformed inner pattern.
 */
private BLangNamedArgMatchPattern transformNamedArgMatchPattern(NamedArgMatchPatternNode namedArgMatchPatternNode,
                                                                Location pos) {
    BLangNamedArgMatchPattern namedArgPattern =
            (BLangNamedArgMatchPattern) TreeBuilder.createNamedArgMatchPattern();
    namedArgPattern.pos = pos;
    namedArgPattern.argName = createIdentifier(namedArgMatchPatternNode.identifier());
    namedArgPattern.matchPattern = transformMatchPattern(namedArgMatchPatternNode.matchPattern());
    return namedArgPattern;
}
/**
 * Transforms a list match pattern `[P1, P2, ..., ...rest]` into its BLang form.
 * Only the last member may be a rest pattern; it is stored separately.
 *
 * Fix: the member loop already skips {@code null} results of
 * {@code transformMatchPattern}, but the last member was dereferenced without the
 * same guard — a latent NPE. The last member now gets the identical null check.
 */
private BLangListMatchPattern transformListMatchPattern(ListMatchPatternNode listMatchPatternNode,
                                                        Location pos) {
    BLangListMatchPattern bLangListMatchPattern =
            (BLangListMatchPattern) TreeBuilder.createListMatchPattern();
    bLangListMatchPattern.pos = pos;
    SeparatedNodeList<Node> matchPatterns = listMatchPatternNode.matchPatterns();
    int matchPatternListSize = matchPatterns.size();
    if (matchPatternListSize == 0) {
        return bLangListMatchPattern;
    }
    // All but the last member are ordinary member patterns; null results are skipped.
    for (int i = 0; i < matchPatternListSize - 1; i++) {
        BLangMatchPattern bLangMemberMatchPattern = transformMatchPattern(matchPatterns.get(i));
        if (bLangMemberMatchPattern == null) {
            continue;
        }
        bLangListMatchPattern.addMatchPattern(bLangMemberMatchPattern);
    }
    BLangMatchPattern lastMember = transformMatchPattern(matchPatterns.get(matchPatternListSize - 1));
    if (lastMember == null) {
        // Consistent with the null-skip above: drop an untransformable last member.
        return bLangListMatchPattern;
    }
    if (lastMember.getKind() == NodeKind.REST_MATCH_PATTERN) {
        bLangListMatchPattern.setRestMatchPattern((BLangRestMatchPattern) lastMember);
    } else {
        bLangListMatchPattern.addMatchPattern(lastMember);
    }
    return bLangListMatchPattern;
}
/**
 * Transforms a rest match pattern (`...var x`) by capturing its variable name.
 */
private BLangRestMatchPattern transformRestMatchPattern(RestMatchPatternNode restMatchPatternNode, Location pos) {
    BLangRestMatchPattern restPattern = (BLangRestMatchPattern) TreeBuilder.createRestMatchPattern();
    restPattern.pos = pos;
    SimpleNameReferenceNode varRef = restMatchPatternNode.variableName();
    restPattern.setIdentifier(createIdentifier(getPosition(varRef), varRef.name()));
    return restPattern;
}
/**
 * Transforms a mapping match pattern `{f1: P1, ..., ...rest}`. Only the final
 * entry may be a rest pattern, which is stored apart from the field patterns.
 */
private BLangMappingMatchPattern transformMappingMatchPattern(MappingMatchPatternNode mappingMatchPatternNode,
                                                              Location pos) {
    BLangMappingMatchPattern mappingPattern =
            (BLangMappingMatchPattern) TreeBuilder.createMappingMatchPattern();
    mappingPattern.pos = pos;
    SeparatedNodeList<Node> fieldPatterns = mappingMatchPatternNode.fieldMatchPatterns();
    int fieldCount = fieldPatterns.size();
    if (fieldCount == 0) {
        return mappingPattern;
    }
    // Every entry except the last is necessarily a field pattern.
    for (int i = 0; i < fieldCount - 1; i++) {
        mappingPattern.fieldMatchPatterns.add((BLangFieldMatchPattern)
                transformMatchPattern(fieldPatterns.get(i)));
    }
    BLangMatchPattern tail = transformMatchPattern(fieldPatterns.get(fieldCount - 1));
    if (tail.getKind() == NodeKind.REST_MATCH_PATTERN) {
        mappingPattern.setRestMatchPattern((BLangRestMatchPattern) tail);
    } else {
        mappingPattern.addFieldMatchPattern((BLangFieldMatchPattern) tail);
    }
    return mappingPattern;
}
/**
 * Transforms a single field match pattern (`name: P`) into its BLang form.
 */
private BLangFieldMatchPattern transformFieldMatchPattern(FieldMatchPatternNode fieldMatchPatternNode,
                                                          Location pos) {
    BLangFieldMatchPattern fieldPattern =
            (BLangFieldMatchPattern) TreeBuilder.createFieldMatchPattern();
    fieldPattern.pos = pos;
    fieldPattern.fieldName = createIdentifier(fieldMatchPatternNode.fieldNameNode());
    fieldPattern.matchPattern = transformMatchPattern(fieldMatchPatternNode.matchPattern());
    return fieldPattern;
}
/**
 * Dispatches one binding-pattern syntax node to the matching BLang transformer.
 * Any kind not listed must be the wildcard pattern (asserted when -ea is on).
 */
private BLangBindingPattern transformBindingPattern(Node bindingPattern) {
    Location position = getPosition(bindingPattern);
    SyntaxKind kind = bindingPattern.kind();
    switch (kind) {
        case CAPTURE_BINDING_PATTERN:
            return transformCaptureBindingPattern((CaptureBindingPatternNode) bindingPattern, position);
        case LIST_BINDING_PATTERN:
            return transformListBindingPattern((ListBindingPatternNode) bindingPattern, position);
        case NAMED_ARG_BINDING_PATTERN:
            return transformNamedArgBindingPattern((NamedArgBindingPatternNode) bindingPattern, position);
        case REST_BINDING_PATTERN:
            return transformRestBindingPattern((RestBindingPatternNode) bindingPattern, position);
        case MAPPING_BINDING_PATTERN:
            return transformMappingBindingPattern((MappingBindingPatternNode) bindingPattern, position);
        case FIELD_BINDING_PATTERN:
            return transformFieldBindingPattern(bindingPattern, position);
        case ERROR_BINDING_PATTERN:
            return transformErrorBindingPattern((ErrorBindingPatternNode) bindingPattern, position);
        case WILDCARD_BINDING_PATTERN:
        default:
            // The grammar guarantees the only remaining kind is the wildcard.
            assert kind == SyntaxKind.WILDCARD_BINDING_PATTERN;
            return transformWildCardBindingPattern(position);
    }
}
/**
 * Creates a wildcard (`_`) binding pattern at the given position.
 */
private BLangWildCardBindingPattern transformWildCardBindingPattern(Location pos) {
    BLangWildCardBindingPattern wildCardPattern =
            (BLangWildCardBindingPattern) TreeBuilder.createWildCardBindingPattern();
    wildCardPattern.pos = pos;
    return wildCardPattern;
}
/**
 * Transforms a capture binding pattern (a plain variable name) into its BLang form.
 */
private BLangCaptureBindingPattern transformCaptureBindingPattern(CaptureBindingPatternNode captureBindingPattern,
                                                                  Location pos) {
    BLangCaptureBindingPattern capturePattern =
            (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
    capturePattern.pos = pos;
    capturePattern.setIdentifier(createIdentifier(captureBindingPattern.variableName()));
    return capturePattern;
}
/**
 * Transforms a rest binding pattern (`...x`) by capturing its variable name.
 */
private BLangRestBindingPattern transformRestBindingPattern(RestBindingPatternNode restBindingPatternNode,
                                                            Location pos) {
    BLangRestBindingPattern restPattern =
            (BLangRestBindingPattern) TreeBuilder.createRestBindingPattern();
    restPattern.pos = pos;
    SimpleNameReferenceNode varRef = restBindingPatternNode.variableName();
    restPattern.setIdentifier(createIdentifier(getPosition(varRef), varRef.name()));
    return restPattern;
}
/**
 * Transforms a list binding pattern `[b1, b2, ..., ...rest]`. A rest pattern is
 * stored separately; all other members are appended in order.
 */
private BLangListBindingPattern transformListBindingPattern(ListBindingPatternNode listBindingPatternNode,
                                                            Location pos) {
    BLangListBindingPattern listPattern =
            (BLangListBindingPattern) TreeBuilder.createListBindingPattern();
    listPattern.pos = pos;
    for (Node member : listBindingPatternNode.bindingPatterns()) {
        if (member.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            listPattern.restBindingPattern = (BLangRestBindingPattern) transformBindingPattern(member);
        } else {
            listPattern.addBindingPattern(transformBindingPattern(member));
        }
    }
    return listPattern;
}
/**
 * Transforms a mapping binding pattern `{f1, f2: b, ..., ...rest}`. A rest pattern
 * is stored separately; every other entry is a field binding pattern.
 */
private BLangMappingBindingPattern transformMappingBindingPattern(MappingBindingPatternNode
                                                                          mappingBindingPatternNode,
                                                                  Location pos) {
    BLangMappingBindingPattern mappingPattern =
            (BLangMappingBindingPattern) TreeBuilder.createMappingBindingPattern();
    mappingPattern.pos = pos;
    for (Node field : mappingBindingPatternNode.fieldBindingPatterns()) {
        if (field.kind() == SyntaxKind.REST_BINDING_PATTERN) {
            mappingPattern.restBindingPattern = (BLangRestBindingPattern) transformBindingPattern(field);
        } else {
            mappingPattern.fieldBindingPatterns.add(
                    (BLangFieldBindingPattern) transformBindingPattern(field));
        }
    }
    return mappingPattern;
}
/**
 * Transforms a field binding pattern. The shorthand `{x}` form (varname-only node)
 * implies a capture pattern named after the field; the full `{x: b}` form carries
 * an explicit nested binding pattern.
 */
private BLangFieldBindingPattern transformFieldBindingPattern(Node bindingPattern, Location pos) {
    BLangFieldBindingPattern fieldPattern =
            (BLangFieldBindingPattern) TreeBuilder.createFieldBindingPattern();
    fieldPattern.pos = pos;
    if (bindingPattern instanceof FieldBindingPatternVarnameNode) {
        // Shorthand form: the field name doubles as the captured variable.
        FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) bindingPattern;
        BLangIdentifier fieldName = createIdentifier(varnameNode.variableName().name());
        fieldPattern.fieldName = fieldName;
        BLangCaptureBindingPattern implicitCapture =
                (BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
        implicitCapture.setIdentifier(fieldName);
        implicitCapture.pos = pos;
        fieldPattern.bindingPattern = implicitCapture;
        return fieldPattern;
    }
    // Full form with an explicit nested pattern.
    FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) bindingPattern;
    fieldPattern.fieldName = createIdentifier(fullNode.variableName().name());
    fieldPattern.bindingPattern = transformBindingPattern(fullNode.bindingPattern());
    return fieldPattern;
}
/**
 * Transforms a named-argument binding pattern (`name = b`) by pairing the argument
 * name with the recursively-transformed inner pattern.
 */
private BLangNamedArgBindingPattern transformNamedArgBindingPattern(NamedArgBindingPatternNode
                                                                            namedArgBindingPattern,
                                                                    Location pos) {
    BLangNamedArgBindingPattern namedArgPattern =
            (BLangNamedArgBindingPattern) TreeBuilder.createNamedArgBindingPattern();
    namedArgPattern.pos = pos;
    namedArgPattern.argName = createIdentifier(namedArgBindingPattern.argName());
    namedArgPattern.bindingPattern = transformBindingPattern(namedArgBindingPattern.bindingPattern());
    return namedArgPattern;
}
// Transforms error(<type>?, msg?, cause?, ...fields) into a BLang error binding pattern.
// The arg list is positional, mirroring transformErrorMatchPattern: the first
// non-field arg is the message, a second non-field arg is the cause, and
// named-arg/rest patterns onward are field patterns (see isErrorFieldBindingPattern).
private BLangErrorBindingPattern transformErrorBindingPattern(ErrorBindingPatternNode errorBindingPatternNode,
Location pos) {
BLangErrorBindingPattern bLangErrorBindingPattern =
(BLangErrorBindingPattern) TreeBuilder.createErrorBindingPattern();
bLangErrorBindingPattern.pos = pos;
// Optional error type reference: error MyError(...).
if (errorBindingPatternNode.typeReference().isPresent()) {
Node nameReferenceNode = errorBindingPatternNode.typeReference().get();
bLangErrorBindingPattern.errorTypeReference =
(BLangUserDefinedType) createTypeNode(nameReferenceNode);
}
if (errorBindingPatternNode.argListBindingPatterns().size() == 0) {
return bLangErrorBindingPattern;
}
// First arg: either the start of the field section, or the message pattern.
Node node = errorBindingPatternNode.argListBindingPatterns().get(0);
if (isErrorFieldBindingPattern(node)) {
createErrorFieldBindingPatterns(0, errorBindingPatternNode, bLangErrorBindingPattern);
return bLangErrorBindingPattern;
}
bLangErrorBindingPattern.errorMessageBindingPattern = createErrorMessageBindingPattern(node);
if (errorBindingPatternNode.argListBindingPatterns().size() == 1) {
return bLangErrorBindingPattern;
}
// Second arg: either the start of the field section, or the cause pattern.
node = errorBindingPatternNode.argListBindingPatterns().get(1);
if (isErrorFieldBindingPattern(node)) {
createErrorFieldBindingPatterns(1, errorBindingPatternNode, bLangErrorBindingPattern);
return bLangErrorBindingPattern;
}
bLangErrorBindingPattern.errorCauseBindingPattern = createErrorCauseBindingPattern(node);
// Everything after message and cause is the field section.
createErrorFieldBindingPatterns(2, errorBindingPatternNode, bLangErrorBindingPattern);
return bLangErrorBindingPattern;
}
/**
 * True when the error-pattern argument belongs to the field section:
 * only named-arg and rest match patterns do.
 */
private boolean isErrorFieldMatchPattern(Node node) {
    SyntaxKind kind = node.kind();
    return kind == SyntaxKind.NAMED_ARG_MATCH_PATTERN || kind == SyntaxKind.REST_MATCH_PATTERN;
}
/**
 * True when the error-pattern argument belongs to the field section:
 * only named-arg and rest binding patterns do.
 */
private boolean isErrorFieldBindingPattern(Node node) {
    SyntaxKind kind = node.kind();
    return kind == SyntaxKind.NAMED_ARG_BINDING_PATTERN || kind == SyntaxKind.REST_BINDING_PATTERN;
}
/**
 * Wraps the first positional error argument as the message match pattern.
 */
private BLangErrorMessageMatchPattern createErrorMessageMatchPattern(Node node) {
    BLangMatchPattern innerPattern = transformMatchPattern(node);
    BLangErrorMessageMatchPattern messagePattern =
            (BLangErrorMessageMatchPattern) TreeBuilder.createErrorMessageMatchPattern();
    messagePattern.pos = getPosition(node);
    messagePattern.simpleMatchPattern = createSimpleMatchPattern(innerPattern);
    return messagePattern;
}
/**
 * Wraps the first positional error argument as the message binding pattern.
 */
private BLangErrorMessageBindingPattern createErrorMessageBindingPattern(Node node) {
    BLangBindingPattern innerPattern = transformBindingPattern(node);
    BLangErrorMessageBindingPattern messagePattern =
            (BLangErrorMessageBindingPattern) TreeBuilder.createErrorMessageBindingPattern();
    messagePattern.pos = getPosition(node);
    messagePattern.simpleBindingPattern = createSimpleBindingPattern(innerPattern);
    return messagePattern;
}
/**
 * Wraps the second positional error argument as the cause match pattern.
 * A nested error pattern is stored directly; anything else becomes a simple pattern.
 */
private BLangErrorCauseMatchPattern createErrorCauseMatchPattern(Node node) {
    BLangMatchPattern innerPattern = transformMatchPattern(node);
    BLangErrorCauseMatchPattern causePattern =
            (BLangErrorCauseMatchPattern) TreeBuilder.createErrorCauseMatchPattern();
    causePattern.pos = getPosition(node);
    if (innerPattern.getKind() == NodeKind.ERROR_MATCH_PATTERN) {
        causePattern.errorMatchPattern = (BLangErrorMatchPattern) innerPattern;
    } else {
        causePattern.simpleMatchPattern = createSimpleMatchPattern(innerPattern);
    }
    return causePattern;
}
/**
 * Wraps the second positional error argument as the cause binding pattern.
 * A nested error pattern is stored directly; anything else becomes a simple pattern.
 */
private BLangErrorCauseBindingPattern createErrorCauseBindingPattern(Node node) {
    BLangBindingPattern innerPattern = transformBindingPattern(node);
    BLangErrorCauseBindingPattern causePattern =
            (BLangErrorCauseBindingPattern) TreeBuilder.createErrorCauseBindingPattern();
    causePattern.pos = getPosition(node);
    if (innerPattern.getKind() == NodeKind.ERROR_BINDING_PATTERN) {
        causePattern.errorBindingPattern = (BLangErrorBindingPattern) innerPattern;
    } else {
        causePattern.simpleBindingPattern = createSimpleBindingPattern(innerPattern);
    }
    return causePattern;
}
/**
 * Adds one error-field argument (named-arg or rest) to the accumulating
 * field-patterns container and returns it.
 */
private BLangErrorFieldMatchPatterns createErrorFieldMatchPattern(Node errorFieldMatchPatternNode,
        BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns) {
    BLangMatchPattern fieldPattern = transformMatchPattern(errorFieldMatchPatternNode);
    bLangErrorFieldMatchPatterns.pos = getPosition(errorFieldMatchPatternNode);
    NodeKind fieldKind = fieldPattern.getKind();
    if (fieldKind == NodeKind.NAMED_ARG_MATCH_PATTERN) {
        bLangErrorFieldMatchPatterns.addNamedArgMatchPattern(
                (org.ballerinalang.model.tree.matchpatterns.NamedArgMatchPatternNode) fieldPattern);
    } else if (fieldKind == NodeKind.REST_MATCH_PATTERN) {
        bLangErrorFieldMatchPatterns.restMatchPattern = (BLangRestMatchPattern) fieldPattern;
    }
    return bLangErrorFieldMatchPatterns;
}
/**
 * Adds one error-field argument (named-arg or rest) to the accumulating
 * field-bindings container and returns it.
 */
private BLangErrorFieldBindingPatterns createErrorFieldBindingPattern(Node errorFieldBindingPatternNode,
                                                                      BLangErrorFieldBindingPatterns
                                                                              bLangErrorFieldBindingPatterns) {
    BLangBindingPattern fieldPattern = transformBindingPattern(errorFieldBindingPatternNode);
    bLangErrorFieldBindingPatterns.pos = getPosition(errorFieldBindingPatternNode);
    NodeKind fieldKind = fieldPattern.getKind();
    if (fieldKind == NodeKind.NAMED_ARG_BINDING_PATTERN) {
        bLangErrorFieldBindingPatterns.addNamedArgBindingPattern(
                (org.ballerinalang.model.tree.bindingpattern.NamedArgBindingPatternNode) fieldPattern);
    } else if (fieldKind == NodeKind.REST_BINDING_PATTERN) {
        bLangErrorFieldBindingPatterns.restBindingPattern = (BLangRestBindingPattern) fieldPattern;
    }
    return bLangErrorFieldBindingPatterns;
}
private void createErrorFieldMatchPatterns(int index, ErrorMatchPatternNode errorMatchPatternNode,
BLangErrorMatchPattern bLangErrorMatchPattern) {
BLangErrorFieldMatchPatterns bLangErrorFieldMatchPatterns =
(BLangErrorFieldMatchPatterns) TreeBuilder.createErrorFieldMatchPattern();
for (int i = index; i < errorMatchPatternNode.argListMatchPatternNode().size(); i++) {
Node errorFieldMatchPatternNode = errorMatchPatternNode.argListMatchPatternNode().get(i);
bLangErrorMatchPattern.errorFieldMatchPatterns = createErrorFieldMatchPattern(errorFieldMatchPatternNode,
bLangErrorFieldMatchPatterns);
}
}
private void createErrorFieldBindingPatterns(int index, ErrorBindingPatternNode errorBindingPatternNode,
BLangErrorBindingPattern bLangErrorBindingPattern) {
BLangErrorFieldBindingPatterns bLangErrorFieldBindingPatterns =
(BLangErrorFieldBindingPatterns) TreeBuilder.createErrorFieldBindingPattern();
for (int i = index; i < errorBindingPatternNode.argListBindingPatterns().size(); i++) {
Node errorFieldBindingPatternNode = errorBindingPatternNode.argListBindingPatterns().get(i);
bLangErrorBindingPattern.errorFieldBindingPatterns =
createErrorFieldBindingPattern(errorFieldBindingPatternNode, bLangErrorFieldBindingPatterns);
}
}
private BLangSimpleMatchPattern createSimpleMatchPattern(BLangNode bLangNode) {
BLangSimpleMatchPattern bLangSimpleMatchPattern =
(BLangSimpleMatchPattern) TreeBuilder.createSimpleMatchPattern();
NodeKind kind = bLangNode.getKind();
switch (kind) {
case WILDCARD_MATCH_PATTERN:
bLangSimpleMatchPattern.wildCardMatchPattern = (BLangWildCardMatchPattern) bLangNode;
break;
case CONST_MATCH_PATTERN:
bLangSimpleMatchPattern.constPattern = (BLangConstPattern) bLangNode;
break;
case VAR_BINDING_PATTERN_MATCH_PATTERN:
bLangSimpleMatchPattern.varVariableName = (BLangVarBindingPatternMatchPattern) bLangNode;
break;
}
return bLangSimpleMatchPattern;
}
private BLangCaptureBindingPattern createCaptureBindingPattern(CaptureBindingPatternNode
captureBindingPatternNode) {
BLangCaptureBindingPattern bLangCaptureBindingPattern =
(BLangCaptureBindingPattern) TreeBuilder.createCaptureBindingPattern();
bLangCaptureBindingPattern.setIdentifier(createIdentifier(captureBindingPatternNode
.variableName()));
bLangCaptureBindingPattern.pos = getPosition(captureBindingPatternNode);
return bLangCaptureBindingPattern;
}
private BLangSimpleBindingPattern createSimpleBindingPattern(BLangNode bLangNode) {
BLangSimpleBindingPattern bLangSimpleBindingPattern =
(BLangSimpleBindingPattern) TreeBuilder.createSimpleBindingPattern();
NodeKind kind = bLangNode.getKind();
switch (kind) {
case WILDCARD_BINDING_PATTERN:
bLangSimpleBindingPattern.wildCardBindingPattern = (BLangWildCardBindingPattern) bLangNode;
break;
case CAPTURE_BINDING_PATTERN:
bLangSimpleBindingPattern.captureBindingPattern = (BLangCaptureBindingPattern) bLangNode;
break;
}
return bLangSimpleBindingPattern;
}
    /**
     * Builds an XML element filter from a name-pattern node. Defaults are "*" for
     * the element name (match-any) and "" for the namespace prefix; each supported
     * node kind overwrites whichever parts it carries.
     */
    private BLangXMLElementFilter createXMLElementFilter(Node node) {
        String ns = "";
        String elementName = "*";
        Location nsPos = null;
        Location elemNamePos = null;
        SyntaxKind kind = node.kind();
        switch (kind) {
            case SIMPLE_NAME_REFERENCE:
                // Unqualified name: element name only, no namespace prefix.
                SimpleNameReferenceNode simpleNameReferenceNode = (SimpleNameReferenceNode) node;
                elementName = simpleNameReferenceNode.name().text();
                elemNamePos = getPosition(simpleNameReferenceNode);
                break;
            case QUALIFIED_NAME_REFERENCE:
                // prefix:name form — both name and namespace are present.
                QualifiedNameReferenceNode qualifiedNameReferenceNode = (QualifiedNameReferenceNode) node;
                elementName = qualifiedNameReferenceNode.identifier().text();
                elemNamePos = getPosition(qualifiedNameReferenceNode.identifier());
                ns = qualifiedNameReferenceNode.modulePrefix().text();
                nsPos = getPosition(qualifiedNameReferenceNode.modulePrefix());
                break;
            case XML_ATOMIC_NAME_PATTERN:
                XMLAtomicNamePatternNode atomicNamePatternNode = (XMLAtomicNamePatternNode) node;
                elementName = atomicNamePatternNode.name().text();
                elemNamePos = getPosition(atomicNamePatternNode.name());
                ns = atomicNamePatternNode.prefix().text();
                nsPos = getPosition(atomicNamePatternNode.prefix());
                break;
            case ASTERISK_TOKEN:
                // "*": keep the match-any defaults, record the position only.
                elemNamePos = getPosition(node);
        }
        // Strip the quoted-identifier prefix (') from names taken from source text.
        if (stringStartsWithSingleQuote(ns)) {
            ns = ns.substring(1);
        }
        if (stringStartsWithSingleQuote(elementName)) {
            elementName = elementName.substring(1);
        }
        return new BLangXMLElementFilter(getPosition(node), null, ns, nsPos, elementName, elemNamePos);
    }
private boolean stringStartsWithSingleQuote(String ns) {
return ns != null && ns.length() > 0 && ns.charAt(0) == '\'';
}
private String getValueFromByteArrayNode(ByteArrayLiteralNode byteArrayLiteralNode) {
StringBuilder value = new StringBuilder();
value.append(byteArrayLiteralNode.type().text());
value.append(" ");
value.append("`");
if (byteArrayLiteralNode.content().isPresent()) {
value.append(byteArrayLiteralNode.content().get().text());
}
value.append("`");
return value.toString();
}
    /**
     * Lowers a mapping binding pattern ({a: x, b, ...rest}) into a record variable.
     * Full fields keep their own binding pattern; var-name fields reuse the field
     * name as the bound variable; the rest pattern terminates the list.
     */
    private BLangRecordVariable createBLangRecordVariable(MappingBindingPatternNode mappingBindingPatternNode) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) TreeBuilder.createRecordVariableNode();
        List<BLangRecordVariableKeyValue> fieldBindingPatternsList = new ArrayList<>();
        for (BindingPatternNode node : mappingBindingPatternNode.fieldBindingPatterns()) {
            BLangRecordVariableKeyValue recordKeyValue = new BLangRecordVariableKeyValue();
            if (node instanceof FieldBindingPatternFullNode) {
                // {name: pattern} — explicit key plus nested binding pattern.
                FieldBindingPatternFullNode fullNode = (FieldBindingPatternFullNode) node;
                recordKeyValue.key = createIdentifier(fullNode.variableName().name());
                recordKeyValue.valueBindingPattern = getBLangVariableNode(fullNode.bindingPattern());
            } else if (node instanceof FieldBindingPatternVarnameNode) {
                // {name} — the field name doubles as the bound variable name.
                FieldBindingPatternVarnameNode varnameNode = (FieldBindingPatternVarnameNode) node;
                recordKeyValue.key = createIdentifier(varnameNode.variableName().name());
                BLangSimpleVariable value = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
                value.pos = getPosition(varnameNode);
                IdentifierNode name = createIdentifier(varnameNode.variableName().name());
                ((BLangIdentifier) name).pos = value.pos;
                value.setName(name);
                recordKeyValue.valueBindingPattern = value;
            } else {
                // Rest binding pattern: goes in its own slot and ends the loop —
                // the unused recordKeyValue for this iteration is discarded.
                recordVariable.restParam = getBLangVariableNode(node);
                break;
            }
            fieldBindingPatternsList.add(recordKeyValue);
        }
        recordVariable.variableList = fieldBindingPatternsList;
        recordVariable.pos = getPosition(mappingBindingPatternNode);
        return recordVariable;
    }
private BLangLiteral createEmptyLiteral() {
BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
bLiteral.value = "";
bLiteral.originalValue = "";
bLiteral.type = symTable.getTypeFromTag(TypeTags.STRING);
return bLiteral;
}
private BLangVariable createSimpleVariable(Location location,
Token identifier,
Location identifierPos) {
BLangSimpleVariable memberVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
memberVar.pos = location;
IdentifierNode name = createIdentifier(identifierPos, identifier);
((BLangIdentifier) name).pos = identifierPos;
memberVar.setName(name);
return memberVar;
}
    /**
     * Converts a binding-pattern syntax node into the corresponding variable form:
     * mapping patterns become record variables, list patterns tuple variables,
     * error patterns error variables; rest/wildcard/capture patterns become simple
     * variables named by their token.
     */
    private BLangVariable getBLangVariableNode(BindingPatternNode bindingPattern) {
        Token varName;
        switch (bindingPattern.kind()) {
            case MAPPING_BINDING_PATTERN:
                MappingBindingPatternNode mappingBindingPatternNode = (MappingBindingPatternNode) bindingPattern;
                return createBLangRecordVariable(mappingBindingPatternNode);
            case LIST_BINDING_PATTERN:
                ListBindingPatternNode listBindingPatternNode = (ListBindingPatternNode) bindingPattern;
                BLangTupleVariable tupleVariable = (BLangTupleVariable) TreeBuilder.createTupleVariableNode();
                tupleVariable.pos = getPosition(listBindingPatternNode);
                // Members are lowered recursively; the rest pattern gets its own slot.
                for (BindingPatternNode memberBindingPattern : listBindingPatternNode.bindingPatterns()) {
                    if (memberBindingPattern.kind() == SyntaxKind.REST_BINDING_PATTERN) {
                        tupleVariable.restVariable = getBLangVariableNode(memberBindingPattern);
                    } else {
                        BLangVariable member = getBLangVariableNode(memberBindingPattern);
                        tupleVariable.memberVariables.add(member);
                    }
                }
                return tupleVariable;
            case ERROR_BINDING_PATTERN:
                ErrorBindingPatternNode errorBindingPatternNode = (ErrorBindingPatternNode) bindingPattern;
                BLangErrorVariable bLangErrorVariable = (BLangErrorVariable) TreeBuilder.createErrorVariableNode();
                bLangErrorVariable.pos = getPosition(errorBindingPatternNode);
                Optional<Node> errorTypeRef = errorBindingPatternNode.typeReference();
                if (errorTypeRef.isPresent()) {
                    bLangErrorVariable.typeNode = createTypeNode(errorTypeRef.get());
                }
                SeparatedNodeList<BindingPatternNode> argListBindingPatterns =
                        errorBindingPatternNode.argListBindingPatterns();
                int numberOfArgs = argListBindingPatterns.size();
                List<BLangErrorVariable.BLangErrorDetailEntry> namedArgs = new ArrayList<>();
                for (int position = 0; position < numberOfArgs; position++) {
                    BindingPatternNode bindingPatternNode = argListBindingPatterns.get(position);
                    switch (bindingPatternNode.kind()) {
                        case CAPTURE_BINDING_PATTERN:
                        case WILDCARD_BINDING_PATTERN:
                            // Position 0 is the error message. A capture/wildcard
                            // at a later position is NOT a message: control falls
                            // through to the cause case below (intentional).
                            if (position == 0) {
                                bLangErrorVariable.message =
                                        (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                                break;
                            }
                        case ERROR_BINDING_PATTERN:
                            bLangErrorVariable.cause = getBLangVariableNode(bindingPatternNode);
                            break;
                        case NAMED_ARG_BINDING_PATTERN:
                            // name = pattern — collected into the detail entries.
                            NamedArgBindingPatternNode namedArgBindingPatternNode =
                                    (NamedArgBindingPatternNode) bindingPatternNode;
                            BLangIdentifier key =
                                    createIdentifier(namedArgBindingPatternNode.argName());
                            BLangVariable valueBindingPattern =
                                    getBLangVariableNode(namedArgBindingPatternNode.bindingPattern());
                            BLangErrorVariable.BLangErrorDetailEntry detailEntry =
                                    new BLangErrorVariable.BLangErrorDetailEntry(key, valueBindingPattern);
                            namedArgs.add(detailEntry);
                            break;
                        default:
                            // Remaining kind: the rest pattern capturing extra detail.
                            bLangErrorVariable.restDetail =
                                    (BLangSimpleVariable) getBLangVariableNode(bindingPatternNode);
                    }
                }
                bLangErrorVariable.detail = namedArgs;
                return bLangErrorVariable;
            case REST_BINDING_PATTERN:
                RestBindingPatternNode restBindingPatternNode = (RestBindingPatternNode) bindingPattern;
                varName = restBindingPatternNode.variableName().name();
                break;
            case WILDCARD_BINDING_PATTERN:
                // "_" — the underscore token itself becomes the variable name.
                WildcardBindingPatternNode wildcardBindingPatternNode = (WildcardBindingPatternNode) bindingPattern;
                varName = wildcardBindingPatternNode.underscoreToken();
                break;
            case CAPTURE_BINDING_PATTERN:
            default:
                CaptureBindingPatternNode captureBindingPatternNode = (CaptureBindingPatternNode) bindingPattern;
                varName = captureBindingPatternNode.variableName();
                break;
        }
        Location pos = getPosition(bindingPattern);
        return createSimpleVariable(pos, varName, getPosition(varName));
    }
BLangValueType addValueType(Location pos, TypeKind typeKind) {
BLangValueType typeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
typeNode.pos = pos;
typeNode.typeKind = typeKind;
return typeNode;
}
private List<BLangStatement> generateBLangStatements(NodeList<StatementNode> statementNodes) {
List<BLangStatement> statements = new ArrayList<>();
return generateAndAddBLangStatements(statementNodes, statements);
}
private List<BLangStatement> generateAndAddBLangStatements(NodeList<StatementNode> statementNodes,
List<BLangStatement> statements) {
for (StatementNode statement : statementNodes) {
if (statement != null) {
if (statement.kind() == SyntaxKind.FORK_STATEMENT) {
generateForkStatements(statements, (ForkStatementNode) statement);
continue;
}
statements.add((BLangStatement) statement.apply(this));
}
}
return statements;
}
private String extractVersion(SeparatedNodeList<Token> versionNumbers) {
StringBuilder version = new StringBuilder();
int size = versionNumbers.size();
for (int i = 0; i < size; i++) {
if (i != 0) {
version.append(".");
}
version.append(versionNumbers.get(i).text());
}
return version.toString();
}
    /**
     * Expands a fork statement into the output list: each named worker becomes a
     * flagged variable definition (followed by any statements queued on
     * {@code additionalStatements} during its lowering), and the fork-join node
     * itself is appended last. All workers in one fork share one anonymous fork key.
     */
    private void generateForkStatements(List<BLangStatement> statements, ForkStatementNode forkStatementNode) {
        BLangForkJoin forkJoin = (BLangForkJoin) forkStatementNode.apply(this);
        String nextAnonymousForkKey = anonymousModelHelper.getNextAnonymousForkKey(packageID);
        for (NamedWorkerDeclarationNode workerDeclarationNode : forkStatementNode.namedWorkerDeclarations()) {
            BLangSimpleVariableDef workerDef = (BLangSimpleVariableDef) workerDeclarationNode.apply(this);
            workerDef.isWorker = true;
            workerDef.isInFork = true;
            workerDef.var.flagSet.add(Flag.FORKED);
            // The worker variable's initializer is a lambda; mark its function too.
            BLangFunction function = ((BLangLambdaFunction) workerDef.var.expr).function;
            function.addFlag(Flag.FORKED);
            function.anonForkName = nextAnonymousForkKey;
            statements.add(workerDef);
            // Drain statements queued as a side effect of lowering this worker.
            while (!this.additionalStatements.empty()) {
                statements.add(additionalStatements.pop());
            }
            forkJoin.addWorkers(workerDef);
        }
        statements.add(forkJoin);
    }
private BLangCheckedExpr createCheckExpr(Location pos, BLangExpression expr) {
BLangCheckedExpr checkedExpr = (BLangCheckedExpr) TreeBuilder.createCheckExpressionNode();
checkedExpr.pos = pos;
checkedExpr.expr = expr;
return checkedExpr;
}
private BLangCheckPanickedExpr createCheckPanickedExpr(Location pos, BLangExpression expr) {
BLangCheckPanickedExpr checkPanickedExpr =
(BLangCheckPanickedExpr) TreeBuilder.createCheckPanicExpressionNode();
checkPanickedExpr.pos = pos;
checkPanickedExpr.expr = expr;
return checkPanickedExpr;
}
private void populateFuncSignature(BLangFunction bLFunction, FunctionSignatureNode funcSignature) {
for (ParameterNode child : funcSignature.parameters()) {
SimpleVariableNode param = (SimpleVariableNode) child.apply(this);
if (child instanceof RestParameterNode) {
bLFunction.setRestParameter(param);
} else {
bLFunction.addParameter(param);
}
}
Optional<ReturnTypeDescriptorNode> retNode = funcSignature.returnTypeDesc();
if (retNode.isPresent()) {
ReturnTypeDescriptorNode returnType = retNode.get();
bLFunction.setReturnTypeNode(createTypeNode(returnType.type()));
bLFunction.returnTypeAnnAttachments = applyAll(returnType.annotations());
} else {
BLangValueType bLValueType = (BLangValueType) TreeBuilder.createValueTypeNode();
bLValueType.pos = symTable.builtinPos;
bLValueType.typeKind = TypeKind.NIL;
bLFunction.setReturnTypeNode(bLValueType);
}
}
private BLangUnaryExpr createBLangUnaryExpr(Location location,
OperatorKind operatorKind,
BLangExpression expr) {
BLangUnaryExpr bLUnaryExpr = (BLangUnaryExpr) TreeBuilder.createUnaryExpressionNode();
bLUnaryExpr.pos = location;
bLUnaryExpr.operator = operatorKind;
bLUnaryExpr.expr = expr;
return bLUnaryExpr;
}
private BLangExpression createExpression(Node expression) {
if (expression.kind() == SyntaxKind.ASYNC_SEND_ACTION) {
dlog.error(getPosition(expression), DiagnosticErrorCode.ASYNC_SEND_NOT_YET_SUPPORTED_AS_EXPRESSION);
Token missingIdentifier = NodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN,
NodeFactory.createEmptyMinutiaeList(), NodeFactory.createEmptyMinutiaeList());
expression = NodeFactory.createSimpleNameReferenceNode(missingIdentifier);
}
return (BLangExpression) createActionOrExpression(expression);
}
    /**
     * Dispatches a syntax node to the matching lowering: simple literals, name
     * references (as variable refs), braced groups, type descriptors (as typedesc
     * access), or — failing all of those — the node's own transformer.
     */
    private BLangNode createActionOrExpression(Node actionOrExpression) {
        if (isSimpleLiteral(actionOrExpression.kind())) {
            return createSimpleLiteral(actionOrExpression);
        } else if (actionOrExpression.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE ||
                actionOrExpression.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
                actionOrExpression.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
            // Name reference -> simple variable reference (pkg alias + name).
            BLangNameReference nameReference = createBLangNameReference(actionOrExpression);
            BLangSimpleVarRef bLVarRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
            bLVarRef.pos = getPosition(actionOrExpression);
            bLVarRef.pkgAlias = this.createIdentifier((Location) nameReference.pkgAlias.getPosition(),
                    nameReference.pkgAlias.getValue());
            bLVarRef.variableName = this.createIdentifier((Location) nameReference.name.getPosition(),
                    nameReference.name.getValue());
            return bLVarRef;
        } else if (actionOrExpression.kind() == SyntaxKind.BRACED_EXPRESSION) {
            // (expr) -> group expression wrapping the lowered inner expression.
            BLangGroupExpr group = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
            group.expression = (BLangExpression) actionOrExpression.apply(this);
            group.pos = getPosition(actionOrExpression);
            return group;
        } else if (isType(actionOrExpression.kind())) {
            // A bare type in expression position becomes a typedesc access.
            BLangTypedescExpr typeAccessExpr = (BLangTypedescExpr) TreeBuilder.createTypeAccessNode();
            typeAccessExpr.pos = getPosition(actionOrExpression);
            typeAccessExpr.typeNode = createTypeNode(actionOrExpression);
            return typeAccessExpr;
        } else {
            return actionOrExpression.apply(this);
        }
    }
private BLangNode createStringTemplateLiteral(NodeList<Node> memberNodes, Location location) {
BLangStringTemplateLiteral stringTemplateLiteral =
(BLangStringTemplateLiteral) TreeBuilder.createStringTemplateLiteralNode();
for (Node memberNode : memberNodes) {
stringTemplateLiteral.exprs.add((BLangExpression) memberNode.apply(this));
}
if (stringTemplateLiteral.exprs.isEmpty()) {
BLangLiteral emptyLiteral = createEmptyLiteral();
emptyLiteral.pos = location;
stringTemplateLiteral.exprs.add(emptyLiteral);
}
stringTemplateLiteral.pos = location;
return stringTemplateLiteral;
}
    /**
     * Lowers a raw template into strings and insertions. Invariant maintained for
     * downstream phases: the strings list always brackets the insertions — an
     * empty string is padded before a leading interpolation, between adjacent
     * interpolations, and after a trailing interpolation.
     */
    private BLangRawTemplateLiteral createRawTemplateLiteral(NodeList<Node> members, Location location) {
        BLangRawTemplateLiteral literal = (BLangRawTemplateLiteral) TreeBuilder.createRawTemplateLiteralNode();
        literal.pos = location;
        boolean prevNodeWasInterpolation = false;
        Node firstMember = members.isEmpty() ? null : members.get(0);
        if (firstMember != null && firstMember.kind() == SyntaxKind.INTERPOLATION) {
            // Template starts with ${...}: pad a leading empty string.
            literal.strings.add(createStringLiteral("", getPosition(firstMember)));
        }
        for (Node member : members) {
            if (member.kind() == SyntaxKind.INTERPOLATION) {
                literal.insertions.add((BLangExpression) member.apply(this));
                if (prevNodeWasInterpolation) {
                    // Two interpolations back to back: pad between them.
                    literal.strings.add(createStringLiteral("", getPosition(member)));
                }
                prevNodeWasInterpolation = true;
            } else {
                literal.strings.add((BLangLiteral) member.apply(this));
                prevNodeWasInterpolation = false;
            }
        }
        if (prevNodeWasInterpolation) {
            // Template ends with ${...}: pad a trailing empty string.
            literal.strings.add(createStringLiteral("", getPosition(members.get(members.size() - 1))));
        }
        return literal;
    }
private BLangSimpleVariable createSimpleVar(Optional<Token> name, Node type, NodeList<AnnotationNode> annotations) {
if (name.isPresent()) {
Token nameToken = name.get();
return createSimpleVar(nameToken, type, null, null, annotations);
}
return createSimpleVar(null, type, null, null, annotations);
}
    /**
     * Convenience overload: a simple variable with no initializer and no
     * visibility qualifier.
     */
    private BLangSimpleVariable createSimpleVar(Token name, Node type, NodeList<AnnotationNode> annotations) {
        return createSimpleVar(name, type, null, null, annotations);
    }
private BLangSimpleVariable createSimpleVar(Token name, Node typeName, Node initializer,
Token visibilityQualifier, NodeList<AnnotationNode> annotations) {
BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
bLSimpleVar.setName(this.createIdentifier(name));
bLSimpleVar.name.pos = getPosition(name);
if (isDeclaredWithVar(typeName)) {
bLSimpleVar.isDeclaredWithVar = true;
} else {
bLSimpleVar.setTypeNode(createTypeNode(typeName));
}
if (visibilityQualifier != null) {
if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
bLSimpleVar.flagSet.add(Flag.PRIVATE);
} else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
bLSimpleVar.flagSet.add(Flag.PUBLIC);
}
}
if (initializer != null) {
bLSimpleVar.setInitialExpression(createExpression(initializer));
}
if (annotations != null) {
bLSimpleVar.annAttachments = applyAll(annotations);
}
return bLSimpleVar;
}
private boolean isDeclaredWithVar(Node typeNode) {
if (typeNode == null || typeNode.kind() == SyntaxKind.VAR_TYPE_DESC) {
return true;
}
return false;
}
    /**
     * Creates an identifier from a token, using the token's own position.
     */
    private BLangIdentifier createIdentifier(Token token) {
        return createIdentifier(getPosition(token), token);
    }
private BLangIdentifier createIdentifier(Location pos, Token token) {
if (token == null) {
return createIdentifier(pos, null, null);
}
String identifierName;
if (token.isMissing()) {
identifierName = missingNodesHelper.getNextMissingNodeName(packageID);
} else {
identifierName = token.text();
}
return createIdentifier(pos, identifierName);
}
    /**
     * Creates an identifier from a raw string value with no whitespace info.
     */
    private BLangIdentifier createIdentifier(Location pos, String value) {
        return createIdentifier(pos, value, null);
    }
private BLangIdentifier createIdentifier(Location pos, String value, Set<Whitespace> ws) {
BLangIdentifier bLIdentifer = (BLangIdentifier) TreeBuilder.createIdentifierNode();
if (value == null) {
return bLIdentifer;
}
if (value.startsWith(IDENTIFIER_LITERAL_PREFIX)) {
bLIdentifer.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value.substring(1)));
bLIdentifer.originalValue = value;
bLIdentifer.setLiteral(true);
} else {
bLIdentifer.setValue(IdentifierUtils.unescapeUnicodeCodepoints(value));
bLIdentifer.setLiteral(false);
}
bLIdentifer.pos = pos;
if (ws != null) {
bLIdentifer.addWS(ws);
}
return bLIdentifer;
}
private BLangLiteral createEmptyStringLiteral(Location pos) {
BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
bLiteral.pos = pos;
bLiteral.type = symTable.stringType;
bLiteral.value = "";
bLiteral.originalValue = "";
return bLiteral;
}
    /**
     * Creates a literal from a syntax node, outside a finite-type context.
     */
    private BLangLiteral createSimpleLiteral(Node literal) {
        return createSimpleLiteral(literal, false);
    }
private BLangLiteral createSimpleLiteral(Node literal, boolean isFiniteType) {
if (literal.kind() == SyntaxKind.UNARY_EXPRESSION) {
UnaryExpressionNode unaryExpr = (UnaryExpressionNode) literal;
BLangLiteral bLangLiteral =
createSimpleLiteral(unaryExpr.expression(), unaryExpr.unaryOperator().kind(), isFiniteType);
bLangLiteral.pos = getPosition(unaryExpr);
return bLangLiteral;
}
return createSimpleLiteral(literal, SyntaxKind.NONE, isFiniteType);
}
    /**
     * Core literal lowering: maps a literal syntax node (optionally prefixed with
     * a sign) onto a typed {@code BLangLiteral}/{@code BLangNumericLiteral}.
     * Handles int/byte/float/decimal numerics, booleans, strings (with unicode
     * escape validation), nil/null, and byte arrays.
     */
    private BLangLiteral createSimpleLiteral(Node literal, SyntaxKind sign, boolean isFiniteType) {
        BLangLiteral bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        SyntaxKind type = literal.kind();
        int typeTag = -1;
        Object value = null;
        String originalValue = null;
        String textValue;
        if (literal instanceof BasicLiteralNode) {
            textValue = ((BasicLiteralNode) literal).literalToken().text();
        } else if (literal instanceof Token) {
            textValue = ((Token) literal).text();
        } else {
            textValue = "";
        }
        // Fold the forwarded unary sign into the literal's text.
        if (sign == SyntaxKind.PLUS_TOKEN) {
            textValue = "+" + textValue;
        } else if (sign == SyntaxKind.MINUS_TOKEN) {
            textValue = "-" + textValue;
        }
        if (type == SyntaxKind.NUMERIC_LITERAL) {
            SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind();
            if (literalTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                    literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                typeTag = TypeTags.INT;
                value = getIntegerLiteral(literal, textValue, sign);
                originalValue = textValue;
                bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
                // Hex integers that fit in a byte are typed BYTE, not INT.
                if (literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN && withinByteRange(value)) {
                    typeTag = TypeTags.BYTE;
                }
            } else if (literalTokenKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN) {
                // A `d` suffix discriminates decimal from float.
                typeTag = NumericLiteralSupport.isDecimalDiscriminated(textValue) ? TypeTags.DECIMAL : TypeTags.FLOAT;
                if (isFiniteType) {
                    // In a finite type, strip suffixes/sign markers from the value.
                    value = textValue.replaceAll("[fd+]", "");
                    originalValue = textValue.replace("+", "");
                } else {
                    value = textValue;
                    originalValue = textValue;
                }
                bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            } else if (literalTokenKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN) {
                typeTag = TypeTags.FLOAT;
                value = getHexNodeValue(textValue);
                originalValue = textValue;
                bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            }
        } else if (type == SyntaxKind.BOOLEAN_LITERAL) {
            typeTag = TypeTags.BOOLEAN;
            value = Boolean.parseBoolean(textValue);
            originalValue = textValue;
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        } else if (type == SyntaxKind.STRING_LITERAL || type == SyntaxKind.XML_TEXT_CONTENT ||
                type == SyntaxKind.TEMPLATE_STRING || type == SyntaxKind.IDENTIFIER_TOKEN) {
            String text = textValue;
            if (type == SyntaxKind.STRING_LITERAL) {
                // Drop the surrounding quotes; a lone opening quote (unterminated
                // literal) only loses its first character.
                if (text.length() > 1 && text.charAt(text.length() - 1) == '"') {
                    text = text.substring(1, text.length() - 1);
                } else {
                    text = text.substring(1);
                }
            }
            String originalText = text;
            Location pos = getPosition(literal);
            // Validate every \u{...} escape and rewrite it to a \uXXXX form.
            Matcher matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
            int position = 0;
            while (matcher.find(position)) {
                String hexStringVal = matcher.group(1);
                int hexDecimalVal = Integer.parseInt(hexStringVal, 16);
                if ((hexDecimalVal >= Constants.MIN_UNICODE && hexDecimalVal <= Constants.MIDDLE_LIMIT_UNICODE)
                        || hexDecimalVal > Constants.MAX_UNICODE) {
                    // Out-of-range code point: report with a location narrowed to
                    // the offending escape inside the literal.
                    String hexStringWithBraces = matcher.group(0);
                    int offset = originalText.indexOf(hexStringWithBraces) + 1;
                    dlog.error(new BLangDiagnosticLocation(currentCompUnitName,
                                    pos.lineRange().startLine().line(),
                                    pos.lineRange().endLine().line(),
                                    pos.lineRange().startLine().offset() + offset,
                                    pos.lineRange().startLine().offset() + offset + hexStringWithBraces.length()),
                            DiagnosticErrorCode.INVALID_UNICODE, hexStringWithBraces);
                }
                text = matcher.replaceFirst("\\\\u" + fillWithZeros(hexStringVal));
                position = matcher.end() - 2;
                matcher = IdentifierUtils.UNICODE_PATTERN.matcher(text);
            }
            if (type != SyntaxKind.TEMPLATE_STRING && type != SyntaxKind.XML_TEXT_CONTENT) {
                try {
                    text = StringEscapeUtils.unescapeJava(text);
                } catch (Exception e) {
                    dlog.error(pos, DiagnosticErrorCode.INVALID_UNICODE, originalText);
                }
            }
            typeTag = TypeTags.STRING;
            value = text;
            originalValue = textValue;
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        } else if (type == SyntaxKind.NIL_LITERAL) {
            originalValue = "()";
            typeTag = TypeTags.NIL;
            value = "()";
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        } else if (type == SyntaxKind.NULL_LITERAL) {
            originalValue = "null";
            typeTag = TypeTags.NIL;
            value = "null";
            bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        } else if (type == SyntaxKind.BINARY_EXPRESSION) { // Should be base16 and base64
            typeTag = TypeTags.BYTE_ARRAY;
            value = textValue;
            originalValue = textValue;
            if (isNumericLiteral(type)) {
                bLiteral = (BLangNumericLiteral) TreeBuilder.createNumericLiteralExpression();
            } else {
                bLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
            }
        } else if (type == SyntaxKind.BYTE_ARRAY_LITERAL) {
            // Byte arrays have a dedicated transformer; use it directly.
            return (BLangLiteral) literal.apply(this);
        }
        bLiteral.pos = getPosition(literal);
        bLiteral.type = symTable.getTypeFromTag(typeTag);
        bLiteral.type.tag = typeTag;
        bLiteral.value = value;
        bLiteral.originalValue = originalValue;
        return bLiteral;
    }
private BLangLiteral createStringLiteral(String value, Location pos) {
BLangLiteral strLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
strLiteral.value = strLiteral.originalValue = value;
strLiteral.type = symTable.stringType;
strLiteral.pos = pos;
return strLiteral;
}
    /**
     * Lowers a type syntax node to a {@code BLangType}: builtin names and nil go
     * through {@link #createBuiltInTypeNode}; qualified names and identifiers
     * become user-defined types; simple name references are unwrapped (or, when
     * they carry diagnostics, lowered defensively to a user-defined type); all
     * other kinds use their own transformer.
     */
    private BLangType createTypeNode(Node type) {
        if (type instanceof BuiltinSimpleNameReferenceNode || type.kind() == SyntaxKind.NIL_TYPE_DESC) {
            return createBuiltInTypeNode(type);
        } else if (type.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE || type.kind() == SyntaxKind.IDENTIFIER_TOKEN) {
            // Exclusive type
            BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
            BLangNameReference nameReference = createBLangNameReference(type);
            bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
            bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
            bLUserDefinedType.pos = getPosition(type);
            return bLUserDefinedType;
        } else if (type.kind() == SyntaxKind.SIMPLE_NAME_REFERENCE) {
            if (type.hasDiagnostics()) {
                // Erroneous reference: build a user-defined type with an empty
                // package alias instead of recursing into the name token.
                BLangUserDefinedType bLUserDefinedType = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
                BLangIdentifier pkgAlias = this.createIdentifier(null, "");
                BLangIdentifier name = this.createIdentifier(((SimpleNameReferenceNode) type).name());
                BLangNameReference nameReference = new BLangNameReference(getPosition(type), null, pkgAlias, name);
                bLUserDefinedType.pkgAlias = (BLangIdentifier) nameReference.pkgAlias;
                bLUserDefinedType.typeName = (BLangIdentifier) nameReference.name;
                bLUserDefinedType.pos = getPosition(type);
                return bLUserDefinedType;
            }
            SimpleNameReferenceNode nameReferenceNode = (SimpleNameReferenceNode) type;
            return createTypeNode(nameReferenceNode.name());
        }
        return (BLangType) type.apply(this);
    }
    /**
     * Lowers a builtin type reference (or nil type descriptor) to a value-type or
     * builtin-ref-type node. Returns {@code null} for {@code var} (callers treat
     * that as type inference); a missing name token is replaced by a synthesized
     * user-defined type so lowering can continue.
     */
    private BLangType createBuiltInTypeNode(Node type) {
        String typeText;
        if (type.kind() == SyntaxKind.NIL_TYPE_DESC) {
            typeText = "()";
        } else if (type instanceof BuiltinSimpleNameReferenceNode) {
            BuiltinSimpleNameReferenceNode simpleNameRef = (BuiltinSimpleNameReferenceNode) type;
            if (simpleNameRef.kind() == SyntaxKind.VAR_TYPE_DESC) {
                return null;
            } else if (simpleNameRef.name().isMissing()) {
                // Recovered token: synthesize a unique name as a user-defined type.
                String name = missingNodesHelper.getNextMissingNodeName(packageID);
                BLangIdentifier identifier = createIdentifier(getPosition(simpleNameRef.name()), name);
                BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
                return createUserDefinedType(getPosition(type), pkgAlias, identifier);
            }
            typeText = simpleNameRef.name().text();
        } else {
            typeText = ((Token) type).text(); // TODO: Remove this once map<string> returns Nil
        }
        TypeKind typeKind = TreeUtils.stringToTypeKind(typeText.replaceAll("\\s+", ""));
        SyntaxKind kind = type.kind();
        switch (kind) {
            // Kinds that map to plain value types.
            case BOOLEAN_TYPE_DESC:
            case INT_TYPE_DESC:
            case BYTE_TYPE_DESC:
            case FLOAT_TYPE_DESC:
            case DECIMAL_TYPE_DESC:
            case STRING_TYPE_DESC:
            case ANY_TYPE_DESC:
            case NIL_TYPE_DESC:
            case HANDLE_TYPE_DESC:
            case ANYDATA_TYPE_DESC:
            case READONLY_TYPE_DESC:
                BLangValueType valueType = (BLangValueType) TreeBuilder.createValueTypeNode();
                valueType.typeKind = typeKind;
                valueType.pos = getPosition(type);
                return valueType;
            default:
                // Everything else is a builtin reference type (json, xml, ...).
                BLangBuiltInRefTypeNode builtInValueType =
                        (BLangBuiltInRefTypeNode) TreeBuilder.createBuiltInReferenceTypeNode();
                builtInValueType.typeKind = typeKind;
                builtInValueType.pos = getPosition(type);
                return builtInValueType;
        }
    }
private VariableNode createBasicVarNodeWithoutType(Location location, Set<Whitespace> ws,
String identifier, Location identifierLocation,
ExpressionNode expr) {
BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
bLSimpleVar.pos = location;
IdentifierNode name = this.createIdentifier(identifierLocation, identifier, ws);
((BLangIdentifier) name).pos = identifierLocation;
bLSimpleVar.setName(name);
bLSimpleVar.addWS(ws);
if (expr != null) {
bLSimpleVar.setInitialExpression(expr);
}
return bLSimpleVar;
}
private BLangInvocation createBLangInvocation(Node nameNode, NodeList<FunctionArgumentNode> arguments,
Location position, boolean isAsync) {
BLangInvocation bLInvocation;
if (isAsync) {
bLInvocation = (BLangInvocation) TreeBuilder.createActionInvocation();
} else {
bLInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
}
BLangNameReference reference = createBLangNameReference(nameNode);
bLInvocation.pkgAlias = (BLangIdentifier) reference.pkgAlias;
bLInvocation.name = (BLangIdentifier) reference.name;
List<BLangExpression> args = new ArrayList<>();
arguments.iterator().forEachRemaining(arg -> args.add(createExpression(arg)));
bLInvocation.argExprs = args;
bLInvocation.pos = position;
return bLInvocation;
}
    /**
     * Resolves a reference node into (package alias, name). Qualified references
     * split into prefix and identifier; the other kinds reduce to a single token
     * (unwrapping simple name references and the error type's name) with an empty
     * package alias.
     */
    private BLangNameReference createBLangNameReference(Node node) {
        switch (node.kind()) {
            case QUALIFIED_NAME_REFERENCE:
                // prefix:identifier — both parts contribute to the reference.
                QualifiedNameReferenceNode iNode = (QualifiedNameReferenceNode) node;
                Token modulePrefix = iNode.modulePrefix();
                IdentifierToken identifier = iNode.identifier();
                BLangIdentifier pkgAlias = this.createIdentifier(getPosition(modulePrefix), modulePrefix);
                Location namePos = getPosition(identifier);
                BLangIdentifier name = this.createIdentifier(namePos, identifier);
                return new BLangNameReference(getPosition(node), null, pkgAlias, name);
            case ERROR_TYPE_DESC:
                // Reduce `error` type descriptor to its name token, then fall out.
                node = ((BuiltinSimpleNameReferenceNode) node).name();
                break;
            case NEW_KEYWORD:
            case IDENTIFIER_TOKEN:
            case ERROR_KEYWORD:
                // Already a token; use it as-is.
                break;
            case SIMPLE_NAME_REFERENCE:
            default:
                node = ((SimpleNameReferenceNode) node).name();
                break;
        }
        // Single-token reference: empty package alias at the builtin position.
        Token iToken = (Token) node;
        BLangIdentifier pkgAlias = this.createIdentifier(symTable.builtinPos, "");
        BLangIdentifier name = this.createIdentifier(iToken);
        return new BLangNameReference(getPosition(node), null, pkgAlias, name);
    }
/**
 * Converts a markdown documentation syntax node into a {@link BLangMarkdownDocumentation}
 * attachment: free description lines, per-parameter docs, the return-value doc,
 * deprecation docs and backtick references.
 *
 * The loop is stateful: once a parameter/return/deprecation section starts, subsequent
 * plain documentation lines are appended to that section rather than to the description.
 */
private BLangMarkdownDocumentation createMarkdownDocumentationAttachment(Optional<Node> markdownDocumentationNode) {
    if (markdownDocumentationNode == null || !markdownDocumentationNode.isPresent()) {
        return null;
    }
    BLangMarkdownDocumentation doc = (BLangMarkdownDocumentation) TreeBuilder.createMarkdownDocumentationNode();

    LinkedList<BLangMarkdownDocumentationLine> documentationLines = new LinkedList<>();
    LinkedList<BLangMarkdownParameterDocumentation> parameters = new LinkedList<>();
    LinkedList<BLangMarkdownReferenceDocumentation> references = new LinkedList<>();

    MarkdownDocumentationNode markdownDocNode = (MarkdownDocumentationNode) markdownDocumentationNode.get();
    NodeList<Node> docLineList = markdownDocNode.documentationLines();

    // Section trackers: non-null while the corresponding section is "open".
    BLangMarkdownParameterDocumentation bLangParaDoc = null;
    BLangMarkdownReturnParameterDocumentation bLangReturnParaDoc = null;
    BLangMarkDownDeprecationDocumentation bLangDeprecationDoc = null;
    BLangMarkDownDeprecatedParametersDocumentation bLangDeprecatedParaDoc = null;
    for (Node singleDocLine : docLineList) {
        switch (singleDocLine.kind()) {
            case MARKDOWN_DOCUMENTATION_LINE:
            case MARKDOWN_REFERENCE_DOCUMENTATION_LINE:
                MarkdownDocumentationLineNode docLineNode = (MarkdownDocumentationLineNode) singleDocLine;
                NodeList<Node> docElements = docLineNode.documentElements();
                String docText = addReferencesAndReturnDocumentationText(references, docElements);
                // Continuation lines attach to whichever section is currently open,
                // in priority order: deprecation > return > parameter > description.
                if (bLangDeprecationDoc != null) {
                    bLangDeprecationDoc.deprecationDocumentationLines.add(docText);
                } else if (bLangReturnParaDoc != null) {
                    bLangReturnParaDoc.returnParameterDocumentationLines.add(docText);
                } else if (bLangParaDoc != null) {
                    bLangParaDoc.parameterDocumentationLines.add(docText);
                } else {
                    BLangMarkdownDocumentationLine bLangDocLine =
                            (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();
                    bLangDocLine.text = docText;
                    bLangDocLine.pos = getPosition(docLineNode);
                    documentationLines.add(bLangDocLine);
                }
                break;
            case MARKDOWN_PARAMETER_DOCUMENTATION_LINE:
                // `+ name - text` — opens a new parameter section.
                bLangParaDoc = new BLangMarkdownParameterDocumentation();
                MarkdownParameterDocumentationLineNode parameterDocLineNode =
                        (MarkdownParameterDocumentationLineNode) singleDocLine;

                BLangIdentifier paraName = new BLangIdentifier();
                Token parameterName = parameterDocLineNode.parameterName();
                String parameterNameValue = parameterName.isMissing() ? "" :
                        IdentifierUtils.unescapeUnicodeCodepoints(parameterName.text());
                // Drop the escape quote of quoted identifiers ('name -> name).
                if (stringStartsWithSingleQuote(parameterNameValue)) {
                    parameterNameValue = parameterNameValue.substring(1);
                }
                paraName.value = parameterNameValue;
                bLangParaDoc.parameterName = paraName;

                NodeList<Node> paraDocElements = parameterDocLineNode.documentElements();
                String paraDocText = addReferencesAndReturnDocumentationText(references, paraDocElements);

                bLangParaDoc.parameterDocumentationLines.add(paraDocText);
                bLangParaDoc.pos = getPosition(parameterName);

                if (bLangDeprecatedParaDoc != null) {
                    // Already inside a "Deprecated parameters" section.
                    bLangDeprecatedParaDoc.parameters.add(bLangParaDoc);
                } else if (bLangDeprecationDoc != null) {
                    // First parameter after a deprecation header starts the
                    // deprecated-parameters section and closes the deprecation one.
                    bLangDeprecatedParaDoc =
                            new BLangMarkDownDeprecatedParametersDocumentation();
                    bLangDeprecatedParaDoc.parameters.add(bLangParaDoc);
                    bLangDeprecationDoc = null;
                } else {
                    parameters.add(bLangParaDoc);
                }
                break;
            case MARKDOWN_RETURN_PARAMETER_DOCUMENTATION_LINE:
                // `+ return - text` — opens the (single) return section.
                bLangReturnParaDoc = new BLangMarkdownReturnParameterDocumentation();
                MarkdownParameterDocumentationLineNode returnParaDocLineNode =
                        (MarkdownParameterDocumentationLineNode) singleDocLine;

                NodeList<Node> returnParaDocElements = returnParaDocLineNode.documentElements();
                String returnParaDocText =
                        addReferencesAndReturnDocumentationText(references, returnParaDocElements);

                bLangReturnParaDoc.returnParameterDocumentationLines.add(returnParaDocText);
                bLangReturnParaDoc.pos = getPosition(returnParaDocLineNode);
                doc.returnParameter = bLangReturnParaDoc;
                break;
            case MARKDOWN_DEPRECATION_DOCUMENTATION_LINE:
                // Deprecation header line — opens the deprecation section.
                bLangDeprecationDoc = new BLangMarkDownDeprecationDocumentation();
                MarkdownDocumentationLineNode deprecationDocLineNode =
                        (MarkdownDocumentationLineNode) singleDocLine;
                String lineText = ((Token) deprecationDocLineNode.documentElements().get(0)).text();
                // NOTE(review): the call below is truncated in this copy of the source
                // (unterminated string literal; presumably a "#"-prefixed line built
                // from lineText) — restore the argument from the upstream file.
                bLangDeprecationDoc.addDeprecationLine("
                bLangDeprecationDoc.pos = getPosition(deprecationDocLineNode);
                break;
            case MARKDOWN_CODE_BLOCK:
                // Fenced code blocks become plain documentation text lines.
                MarkdownCodeBlockNode codeBlockNode = (MarkdownCodeBlockNode) singleDocLine;
                transformCodeBlock(documentationLines, codeBlockNode);
                break;
            default:
                break;
        }
    }
    doc.documentationLines = documentationLines;
    doc.parameters = parameters;
    doc.references = references;
    doc.deprecationDocumentation = bLangDeprecationDoc;
    doc.deprecatedParametersDocumentation = bLangDeprecatedParaDoc;
    doc.pos = getPosition(markdownDocNode);
    return doc;
}
/**
 * Flattens a fenced markdown code block back into a single documentation text
 * line (backticks, optional language attribute, code lines, closing backticks)
 * and appends it to {@code documentationLines}.
 */
private void transformCodeBlock(LinkedList<BLangMarkdownDocumentationLine> documentationLines,
                                MarkdownCodeBlockNode codeBlockNode) {
    BLangMarkdownDocumentationLine bLangDocLine =
            (BLangMarkdownDocumentationLine) TreeBuilder.createMarkdownDocumentationTextNode();
    StringBuilder docText = new StringBuilder();
    // NOTE: text() vs toString() is deliberate here — presumably toString()
    // carries surrounding trivia while text() is the bare token; confirm before
    // unifying these calls.
    if (codeBlockNode.langAttribute().isPresent()) {
        docText.append(codeBlockNode.startBacktick().text());
        docText.append(codeBlockNode.langAttribute().get().toString());
    } else {
        docText.append(codeBlockNode.startBacktick().toString());
    }
    codeBlockNode.codeLines().forEach(codeLine -> docText.append(codeLine.codeDescription().toString()));
    docText.append(codeBlockNode.endBacktick().text());
    bLangDocLine.text = docText.toString();
    // Position is anchored at the '#' token that starts the code block line.
    bLangDocLine.pos = getPosition(codeBlockNode.startLineHashToken());
    documentationLines.add(bLangDocLine);
}
/**
 * Concatenates the text of one documentation line's elements and collects any
 * backtick name references into {@code references}.
 *
 * @param references  accumulator for `name` references found on this line
 * @param docElements the syntax elements making up one documentation line
 * @return the line's text with at most one leading whitespace stripped
 */
private String addReferencesAndReturnDocumentationText(LinkedList<BLangMarkdownReferenceDocumentation> references,
                                                       NodeList<Node> docElements) {
    StringBuilder docText = new StringBuilder();
    for (Node element : docElements) {
        if (element.kind() == SyntaxKind.BALLERINA_NAME_REFERENCE) {
            // e.g. `foo` or function `mod:bar()` — recorded as a reference AND
            // reproduced verbatim in the text.
            BLangMarkdownReferenceDocumentation bLangRefDoc = new BLangMarkdownReferenceDocumentation();
            BallerinaNameReferenceNode balNameRefNode = (BallerinaNameReferenceNode) element;

            bLangRefDoc.pos = getPosition(balNameRefNode);

            Token startBacktick = balNameRefNode.startBacktick();
            Node backtickContent = balNameRefNode.nameReference();
            Token endBacktick = balNameRefNode.endBacktick();

            String contentString = backtickContent.isMissing() ? "" : backtickContent.toString();
            bLangRefDoc.referenceName = contentString;

            // Default until an explicit keyword (type/function/...) is seen.
            bLangRefDoc.type = DocumentationReferenceType.BACKTICK_CONTENT;
            Optional<Token> referenceType = balNameRefNode.referenceType();
            referenceType.ifPresent(
                    refType -> {
                        bLangRefDoc.type = stringToRefType(refType.text());
                        docText.append(refType.toString());
                    }
            );

            // Fill in qualifier/identifier/typeName from the backtick content.
            transformDocumentationBacktickContent(backtickContent, bLangRefDoc);

            docText.append(startBacktick.isMissing() ? "" : startBacktick.text());
            docText.append(contentString);
            docText.append(endBacktick.isMissing() ? "" : endBacktick.text());
            references.add(bLangRefDoc);
        } else if (element.kind() == SyntaxKind.DOCUMENTATION_DESCRIPTION) {
            // Plain text run.
            Token docDescription = (Token) element;
            docText.append(docDescription.text());
        } else if (element.kind() == SyntaxKind.INLINE_CODE_REFERENCE) {
            // `code` span — reproduced verbatim, no reference recorded.
            InlineCodeReferenceNode inlineCodeRefNode = (InlineCodeReferenceNode) element;
            docText.append(inlineCodeRefNode.startBacktick().text());
            docText.append(inlineCodeRefNode.codeReference().text());
            docText.append(inlineCodeRefNode.endBacktick().text());
        }
    }

    return trimLeftAtMostOne(docText.toString());
}
/**
 * Strips at most one leading whitespace character from {@code text}.
 * Further leading whitespace is preserved (it is significant in markdown docs).
 */
private String trimLeftAtMostOne(String text) {
    if (text.isEmpty() || !Character.isWhitespace(text.charAt(0))) {
        return text;
    }
    return text.substring(1);
}
/**
 * Decomposes the content found between documentation backticks into the
 * qualifier / identifier / typeName fields of {@code bLangRefDoc}, then
 * unescapes and unquotes the extracted names.
 *
 * @throws IllegalArgumentException for an unexpected content kind
 */
private void transformDocumentationBacktickContent(Node backtickContent,
                                                   BLangMarkdownReferenceDocumentation bLangRefDoc) {
    QualifiedNameReferenceNode qualifiedRef;
    SimpleNameReferenceNode simpleRef;

    switch (backtickContent.kind()) {
        case CODE_CONTENT:
            // Unparseable reference content — flag so later phases can warn.
            bLangRefDoc.hasParserWarnings = true;
            break;
        case QUALIFIED_NAME_REFERENCE:
            // `mod:name`
            qualifiedRef = (QualifiedNameReferenceNode) backtickContent;
            bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
            bLangRefDoc.identifier = qualifiedRef.identifier().text();
            break;
        case SIMPLE_NAME_REFERENCE:
            // `name`
            simpleRef = (SimpleNameReferenceNode) backtickContent;
            bLangRefDoc.identifier = simpleRef.name().text();
            break;
        case FUNCTION_CALL:
            // `foo()` or `mod:foo()` — only the callee name matters here.
            Node funcName = (((FunctionCallExpressionNode) backtickContent).functionName());
            if (funcName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                qualifiedRef = (QualifiedNameReferenceNode) funcName;
                bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                bLangRefDoc.identifier = qualifiedRef.identifier().text();
            } else {
                simpleRef = (SimpleNameReferenceNode) funcName;
                bLangRefDoc.identifier = simpleRef.name().text();
            }
            break;
        case METHOD_CALL:
            // `T.foo()` or `mod:T.foo()` — method name plus the receiver type.
            MethodCallExpressionNode methodCallExprNode = (MethodCallExpressionNode) backtickContent;
            bLangRefDoc.identifier =
                    ((SimpleNameReferenceNode) methodCallExprNode.methodName()).name().text();
            Node refName = methodCallExprNode.expression();
            if (refName.kind() == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
                qualifiedRef = (QualifiedNameReferenceNode) refName;
                bLangRefDoc.qualifier = qualifiedRef.modulePrefix().text();
                bLangRefDoc.typeName = qualifiedRef.identifier().text();
            } else {
                simpleRef = (SimpleNameReferenceNode) refName;
                bLangRefDoc.typeName = simpleRef.name().text();
            }
            break;
        default:
            throw new IllegalArgumentException("Invalid backtick content transformation");
    }
    // Normalise: resolve \u escapes and drop the quote of quoted identifiers.
    if (bLangRefDoc.identifier != null) {
        bLangRefDoc.identifier = IdentifierUtils.unescapeUnicodeCodepoints(bLangRefDoc.identifier);
        if (stringStartsWithSingleQuote(bLangRefDoc.identifier)) {
            bLangRefDoc.identifier = bLangRefDoc.identifier.substring(1);
        }
    }
    if (bLangRefDoc.qualifier != null) {
        bLangRefDoc.qualifier = IdentifierUtils.unescapeUnicodeCodepoints(bLangRefDoc.qualifier);
        if (stringStartsWithSingleQuote(bLangRefDoc.qualifier)) {
            bLangRefDoc.qualifier = bLangRefDoc.qualifier.substring(1);
        }
    }
}
/**
 * Maps a documentation reference keyword (e.g. "type", "function") to its
 * {@link DocumentationReferenceType}; anything unrecognised is plain
 * backtick content. A null keyword fails fast, as the original
 * switch-on-string did.
 */
private DocumentationReferenceType stringToRefType(String refTypeName) {
    if (refTypeName.equals("type")) {
        return DocumentationReferenceType.TYPE;
    }
    if (refTypeName.equals("service")) {
        return DocumentationReferenceType.SERVICE;
    }
    if (refTypeName.equals("variable")) {
        return DocumentationReferenceType.VARIABLE;
    }
    if (refTypeName.equals("var")) {
        return DocumentationReferenceType.VAR;
    }
    if (refTypeName.equals("annotation")) {
        return DocumentationReferenceType.ANNOTATION;
    }
    if (refTypeName.equals("module")) {
        return DocumentationReferenceType.MODULE;
    }
    if (refTypeName.equals("function")) {
        return DocumentationReferenceType.FUNCTION;
    }
    if (refTypeName.equals("parameter")) {
        return DocumentationReferenceType.PARAMETER;
    }
    if (refTypeName.equals("const")) {
        return DocumentationReferenceType.CONST;
    }
    return DocumentationReferenceType.BACKTICK_CONTENT;
}
/**
 * Parses a decimal or hexadecimal integer literal into a {@link Long}
 * (or returns the raw text on overflow, via parseLong's error path).
 * Returns null for any other literal token kind.
 */
private Object getIntegerLiteral(Node literal, String nodeValue, SyntaxKind sign) {
    SyntaxKind literalTokenKind = ((BasicLiteralNode) literal).literalToken().kind();
    if (literalTokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN) {
        return parseLong(literal, nodeValue, nodeValue, 10, sign, DiagnosticErrorCode.INTEGER_TOO_SMALL,
                DiagnosticErrorCode.INTEGER_TOO_LARGE);
    }
    if (literalTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
        // Strip the 0x/0X prefix before parsing the hex digits.
        String processedNodeValue = nodeValue.toLowerCase().replace("0x", "");
        return parseLong(literal, nodeValue, processedNodeValue, 16, sign,
                DiagnosticErrorCode.HEXADECIMAL_TOO_SMALL, DiagnosticErrorCode.HEXADECIMAL_TOO_LARGE);
    }
    return null;
}
/**
 * Parses {@code processedNodeValue} as a long in the given radix.
 * On overflow/underflow (or malformed digits) it logs a diagnostic —
 * {@code code1} when the literal carries a minus sign (too small),
 * {@code code2} otherwise (too large) — and returns the original text.
 *
 * @return the parsed {@link Long}, or {@code originalNodeValue} on failure
 */
private Object parseLong(Node literal, String originalNodeValue,
                         String processedNodeValue, int radix, SyntaxKind sign,
                         DiagnosticCode code1, DiagnosticCode code2) {
    try {
        return Long.parseLong(processedNodeValue, radix);
    } catch (NumberFormatException e) {
        // Long.parseLong only throws NumberFormatException; catching the
        // narrow type (instead of the previous broad Exception) avoids
        // masking unrelated programming errors.
        Location pos = getPosition(literal);
        if (sign == SyntaxKind.MINUS_TOKEN) {
            // Widen the reported position one column left so it covers the
            // preceding '-' token as well.
            pos = new BLangDiagnosticLocation(pos.lineRange().filePath(),
                    pos.lineRange().startLine().line(),
                    pos.lineRange().endLine().line(),
                    pos.lineRange().startLine().offset() - 1,
                    pos.lineRange().endLine().offset());
            dlog.error(pos, code1, originalNodeValue);
        } else {
            dlog.error(pos, code2, originalNodeValue);
        }
    }
    return originalNodeValue;
}
/**
 * Ensures a hexadecimal floating-point literal carries a binary exponent,
 * appending the neutral exponent "p0" when none is present.
 */
private String getHexNodeValue(String value) {
    boolean hasExponent = value.contains("p") || value.contains("P");
    if (hasExponent) {
        return value;
    }
    return value + "p0";
}
/**
 * Left-pads {@code str} with '0' characters to a minimum length of 4.
 * Strings already 4 characters or longer are returned unchanged.
 * Uses a StringBuilder instead of repeated String concatenation in a loop.
 */
private String fillWithZeros(String str) {
    int padding = 4 - str.length();
    if (padding <= 0) {
        return str;
    }
    StringBuilder padded = new StringBuilder(4);
    for (int i = 0; i < padding; i++) {
        padded.append('0');
    }
    return padded.append(str).toString();
}
/**
 * Adds {@code flag} to {@code variable} and, for structured binding patterns
 * (tuple / record / error variables), recursively to every nested member,
 * field value, detail binding and rest binding.
 */
private void markVariableWithFlag(BLangVariable variable, Flag flag) {
    variable.flagSet.add(flag);

    switch (variable.getKind()) {
        case TUPLE_VARIABLE:
            // [a, b, ...rest] — flag each member and the rest binding.
            BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
            for (BLangVariable var : tupleVariable.memberVariables) {
                markVariableWithFlag(var, flag);
            }
            if (tupleVariable.restVariable != null) {
                markVariableWithFlag(tupleVariable.restVariable, flag);
            }
            break;
        case RECORD_VARIABLE:
            // {f1: a, f2: b, ...rest} — flag each field binding and the rest param.
            BLangRecordVariable recordVariable = (BLangRecordVariable) variable;
            for (BLangRecordVariableKeyValue keyValue : recordVariable.variableList) {
                markVariableWithFlag(keyValue.getValue(), flag);
            }
            if (recordVariable.restParam != null) {
                markVariableWithFlag((BLangVariable) recordVariable.restParam, flag);
            }
            break;
        case ERROR_VARIABLE:
            // error(message, cause, detail..., ...restDetail) — flag every present part.
            BLangErrorVariable errorVariable = (BLangErrorVariable) variable;
            BLangSimpleVariable message = errorVariable.message;
            if (message != null) {
                markVariableWithFlag(message, flag);
            }
            BLangVariable cause = errorVariable.cause;
            if (cause != null) {
                markVariableWithFlag(cause, flag);
            }
            errorVariable.detail.forEach(entry -> markVariableWithFlag(entry.valueBindingPattern, flag));
            if (errorVariable.restDetail != null) {
                markVariableWithFlag(errorVariable.restDetail, flag);
            }
            break;
        // Simple variables need no recursion; other kinds intentionally fall through.
    }
}
/**
 * Returns true for the simple-literal syntax kinds
 * (string / numeric / boolean / nil / null).
 * Kept as a switch so a null kind fails fast with an NPE.
 */
private boolean isSimpleLiteral(SyntaxKind syntaxKind) {
    switch (syntaxKind) {
        case STRING_LITERAL:
        case NUMERIC_LITERAL:
        case BOOLEAN_LITERAL:
        case NIL_LITERAL:
        case NULL_LITERAL:
            return true;
        default:
            return false;
    }
}
/**
 * Returns true when {@code nodeKind} denotes a type-descriptor syntax node.
 * Kept as a switch so a null kind fails fast with an NPE; keep this list in
 * sync with the set of *_TYPE_DESC kinds in {@code SyntaxKind}.
 */
static boolean isType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case RECORD_TYPE_DESC:
        case OBJECT_TYPE_DESC:
        case NIL_TYPE_DESC:
        case OPTIONAL_TYPE_DESC:
        case ARRAY_TYPE_DESC:
        case INT_TYPE_DESC:
        case BYTE_TYPE_DESC:
        case FLOAT_TYPE_DESC:
        case DECIMAL_TYPE_DESC:
        case STRING_TYPE_DESC:
        case BOOLEAN_TYPE_DESC:
        case XML_TYPE_DESC:
        case JSON_TYPE_DESC:
        case HANDLE_TYPE_DESC:
        case ANY_TYPE_DESC:
        case ANYDATA_TYPE_DESC:
        case NEVER_TYPE_DESC:
        case VAR_TYPE_DESC:
        case SERVICE_TYPE_DESC:
        case PARAMETERIZED_TYPE_DESC:
        case UNION_TYPE_DESC:
        case ERROR_TYPE_DESC:
        case STREAM_TYPE_DESC:
        case TABLE_TYPE_DESC:
        case FUNCTION_TYPE_DESC:
        case TUPLE_TYPE_DESC:
        case PARENTHESISED_TYPE_DESC:
        case READONLY_TYPE_DESC:
        case DISTINCT_TYPE_DESC:
        case INTERSECTION_TYPE_DESC:
        case SINGLETON_TYPE_DESC:
        case TYPE_REFERENCE_TYPE_DESC:
            return true;
        default:
            return false;
    }
}
/**
 * Returns true only for the NUMERIC_LITERAL syntax kind.
 * Simplified from a single-case switch to a direct comparison
 * (note: unlike the switch, this returns false rather than NPE for a
 * null kind — kind() never yields null in practice).
 */
private boolean isNumericLiteral(SyntaxKind syntaxKind) {
    return syntaxKind == SyntaxKind.NUMERIC_LITERAL;
}
/**
 * A node is "present" unless the parser produced the NONE placeholder for it.
 */
private boolean isPresent(Node node) {
    SyntaxKind kind = node.kind();
    return kind != SyntaxKind.NONE;
}
/**
 * A type node is anonymous unless its direct parent is a type definition
 * or a distinct-type descriptor (both of which name the type).
 */
private boolean checkIfAnonymous(Node node) {
    SyntaxKind parentKind = node.parent().kind();
    boolean named = parentKind == SyntaxKind.DISTINCT_TYPE_DESC || parentKind == SyntaxKind.TYPE_DEFINITION;
    return !named;
}
/**
 * Walks up the ancestor chain; true when any ancestor (or the node itself)
 * is a statement, i.e. the node lives in a local (non-top-level) context.
 */
private boolean ifInLocalContext(Node parent) {
    for (Node current = parent; current != null; current = current.parent()) {
        if (current instanceof StatementNode) {
            return true;
        }
    }
    return false;
}
/**
 * Hoists an inline record type descriptor into a generated, public, anonymous
 * top-level type definition and returns a user-defined type node referring to it.
 */
private BLangType createAnonymousRecordType(RecordTypeDescriptorNode recordTypeDescriptorNode,
                                            BLangRecordTypeNode recordTypeNode) {
    BLangTypeDefinition anonTypeDef = (BLangTypeDefinition) TreeBuilder.createTypeDefinition();
    Location pos = getPosition(recordTypeDescriptorNode);
    // Generate a fresh, package-unique name for the hoisted type.
    IdentifierNode anonTypeGenName = createIdentifier(symTable.builtinPos,
            anonymousModelHelper.getNextAnonymousTypeKey(this.packageID), null);
    anonTypeDef.setName(anonTypeGenName);
    anonTypeDef.flagSet.add(Flag.PUBLIC);
    anonTypeDef.flagSet.add(Flag.ANONYMOUS);
    anonTypeDef.typeNode = recordTypeNode;
    anonTypeDef.pos = pos;
    addToTop(anonTypeDef);
    // Reference the generated definition with an empty package alias.
    return createUserDefinedType(pos, (BLangIdentifier) TreeBuilder.createIdentifierNode(), anonTypeDef.name);
}
/**
 * Assembles a user-defined type reference node from its position, package
 * alias and type name.
 */
private BLangUserDefinedType createUserDefinedType(Location pos,
                                                   BLangIdentifier pkgAlias,
                                                   BLangIdentifier name) {
    BLangUserDefinedType typeNode = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    typeNode.pos = pos;
    typeNode.pkgAlias = pkgAlias;
    typeNode.typeName = name;
    return typeNode;
}
/**
 * True when {@code num} is a Long within the unsigned byte range [0, 255];
 * any other runtime type is rejected outright.
 */
private boolean withinByteRange(Object num) {
    if (!(num instanceof Long)) {
        return false;
    }
    long value = (Long) num;
    return value >= 0 && value <= 255;
}
/**
 * Fluent builder for {@link BLangSimpleVariable} nodes: accumulates name,
 * type, flags, initial expression and position, then materialises the node
 * in {@link #build()}.
 */
private class SimpleVarBuilder {
    private BLangIdentifier name;
    private BLangType type;
    private boolean isDeclaredWithVar;
    private Set<Flag> flags = new HashSet<>();
    private boolean isFinal;
    private ExpressionNode expr;
    private Location pos;

    public BLangSimpleVariable build() {
        BLangSimpleVariable bLSimpleVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        bLSimpleVar.setName(this.name);
        // Fix: setTypeNode(this.type) was previously invoked twice with the
        // same value; a single call is sufficient.
        bLSimpleVar.setTypeNode(this.type);
        bLSimpleVar.isDeclaredWithVar = this.isDeclaredWithVar;
        bLSimpleVar.flagSet.addAll(this.flags);
        if (this.isFinal) {
            // FINAL is applied recursively so nested bindings are final too.
            markVariableWithFlag(bLSimpleVar, Flag.FINAL);
        }
        bLSimpleVar.setInitialExpression(this.expr);
        bLSimpleVar.pos = pos;
        return bLSimpleVar;
    }

    // Name the variable from a raw string (no position).
    public SimpleVarBuilder with(String name) {
        this.name = createIdentifier(null, name);
        return this;
    }

    // Name the variable from a raw string with an explicit identifier position.
    public SimpleVarBuilder with(String name, Location identifierPos) {
        this.name = createIdentifier(identifierPos, name);
        return this;
    }

    // Name the variable from an identifier token.
    public SimpleVarBuilder with(Token token) {
        this.name = createIdentifier(token);
        return this;
    }

    // Set the type node; a null or `var` type marks the variable as var-declared.
    public SimpleVarBuilder setTypeByNode(Node typeName) {
        this.isDeclaredWithVar = typeName == null || typeName.kind() == SyntaxKind.VAR_TYPE_DESC;
        if (typeName == null) {
            return this;
        }
        this.type = createTypeNode(typeName);
        return this;
    }

    // Set the initializer from a syntax node (null clears it).
    public SimpleVarBuilder setExpressionByNode(Node initExprNode) {
        this.expr = initExprNode != null ? createExpression(initExprNode) : null;
        return this;
    }

    // Set the initializer from an already-built expression.
    public SimpleVarBuilder setExpression(ExpressionNode expression) {
        this.expr = expression;
        return this;
    }

    public SimpleVarBuilder isDeclaredWithVar() {
        this.isDeclaredWithVar = true;
        return this;
    }

    public SimpleVarBuilder isFinal() {
        this.isFinal = true;
        return this;
    }

    // Listeners are implicitly final.
    public SimpleVarBuilder isListenerVar() {
        this.flags.add(Flag.LISTENER);
        this.flags.add(Flag.FINAL);
        return this;
    }

    // Translate an optional visibility qualifier token into PRIVATE/PUBLIC.
    public SimpleVarBuilder setVisibility(Token visibilityQualifier) {
        if (visibilityQualifier != null) {
            if (visibilityQualifier.kind() == SyntaxKind.PRIVATE_KEYWORD) {
                this.flags.add(Flag.PRIVATE);
            } else if (visibilityQualifier.kind() == SyntaxKind.PUBLIC_KEYWORD) {
                this.flags.add(Flag.PUBLIC);
            }
        }
        return this;
    }

    public SimpleVarBuilder setFinal(boolean present) {
        this.isFinal = present;
        return this;
    }

    // NOTE(review): toggles Flag.PUBLIC, not an OPTIONAL flag — looks
    // suspicious for a method named setOptional; confirm intent upstream.
    public SimpleVarBuilder setOptional(boolean present) {
        if (present) {
            this.flags.add(Flag.PUBLIC);
        } else {
            this.flags.remove(Flag.PUBLIC);
        }
        return this;
    }

    public SimpleVarBuilder setRequired(boolean present) {
        if (present) {
            this.flags.add(Flag.REQUIRED);
        } else {
            this.flags.remove(Flag.REQUIRED);
        }
        return this;
    }

    public SimpleVarBuilder isPublic() {
        this.flags.add(Flag.PUBLIC);
        return this;
    }

    public SimpleVarBuilder isWorkerVar() {
        this.flags.add(Flag.WORKER);
        return this;
    }

    public SimpleVarBuilder setPos(Location pos) {
        this.pos = pos;
        return this;
    }
}
// Marks a single simple variable as final (non-recursive; contrast with
// markVariableWithFlag, which also descends into nested bindings).
private void addFinalQualifier(BLangSimpleVariable simpleVar) {
    simpleVar.flagSet.add(Flag.FINAL);
}
/**
 * Appends a top-level node to the current compilation unit;
 * a no-op when no compilation unit is active.
 */
private void addToTop(TopLevelNode topLevelNode) {
    if (currentCompilationUnit == null) {
        return;
    }
    currentCompilationUnit.addTopLevelNode(topLevelNode);
}
/**
 * Returns a copy of {@code location} whose start is pulled left (earlier)
 * to the start of {@code upTo}; the end is unchanged.
 * Precondition (asserted): {@code upTo} must not start after {@code location}.
 */
private Location expandLeft(Location location, Location upTo) {
    assert location.lineRange().startLine().line() > upTo.lineRange().startLine().line() ||
            (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() &&
                    location.lineRange().startLine().offset() >= upTo.lineRange().startLine().offset());

    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            upTo.lineRange().startLine().line(),
            location.lineRange().endLine().line(),
            upTo.lineRange().startLine().offset(),
            location.lineRange().endLine().offset());
}
/**
 * Returns a copy of {@code location} whose start is pushed right (later)
 * to the start of {@code upTo}; the end is unchanged.
 * Precondition (asserted): {@code upTo} must not start before {@code location}.
 */
private Location trimLeft(Location location, Location upTo) {
    assert location.lineRange().startLine().line() < upTo.lineRange().startLine().line() ||
            (location.lineRange().startLine().line() == upTo.lineRange().startLine().line() &&
                    location.lineRange().startLine().offset() <= upTo.lineRange().startLine().offset());

    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            upTo.lineRange().startLine().line(),
            location.lineRange().endLine().line(),
            upTo.lineRange().startLine().offset(),
            location.lineRange().endLine().offset());
}
/**
 * Returns a copy of {@code location} whose end is pulled left (earlier)
 * to the end of {@code upTo}; the start is unchanged.
 * Precondition (asserted): {@code upTo} must not end after {@code location}.
 */
private Location trimRight(Location location, Location upTo) {
    assert location.lineRange().endLine().line() > upTo.lineRange().endLine().line() ||
            (location.lineRange().endLine().line() == upTo.lineRange().endLine().line() &&
                    location.lineRange().endLine().offset() >= upTo.lineRange().endLine().offset());

    return new BLangDiagnosticLocation(location.lineRange().filePath(),
            location.lineRange().startLine().line(),
            upTo.lineRange().endLine().line(),
            location.lineRange().startLine().offset(),
            upTo.lineRange().endLine().offset());
}
/**
 * Translates each class qualifier token (distinct/client/readonly/service/isolated)
 * into its corresponding flag on the class definition.
 *
 * @throws RuntimeException for an unsupported qualifier kind
 */
private void setClassQualifiers(NodeList<Token> qualifiers, BLangClassDefinition blangClass) {
    for (Token qualifier : qualifiers) {
        SyntaxKind kind = qualifier.kind();
        Flag flag;
        switch (kind) {
            case DISTINCT_KEYWORD:
                flag = Flag.DISTINCT;
                break;
            case CLIENT_KEYWORD:
                flag = Flag.CLIENT;
                break;
            case READONLY_KEYWORD:
                flag = Flag.READONLY;
                break;
            case SERVICE_KEYWORD:
                flag = Flag.SERVICE;
                break;
            case ISOLATED_KEYWORD:
                flag = Flag.ISOLATED;
                break;
            default:
                throw new RuntimeException("Syntax kind is not supported: " + kind);
        }
        blangClass.flagSet.add(flag);
    }
}
} |
endpoint is a URL, it is possible that it also contains subpath, can we only override port value for the endpoint? | public WebPubSubAsyncServiceClient buildAsyncClient() {
if (hub == null || hub.isEmpty()) {
logger.logThrowableAsError(
new IllegalStateException("hub is not valid - it must be non-null and non-empty."));
}
if (endpoint == null && credential == null) {
final Map<String, String> csParams = parseConnectionString(connectionString);
if (!csParams.containsKey("endpoint") && !csParams.containsKey("accesskey")) {
logger.logThrowableAsError(new IllegalArgumentException(
"Connection string does not contain required 'endpoint' and 'accesskey' values"));
}
final String accessKey = csParams.get("accesskey");
this.credential = new AzureKeyCredential(accessKey);
this.endpoint = csParams.get("endpoint");
String port = csParams.get("port");
if (!CoreUtils.isNullOrEmpty(port)) {
this.endpoint = this.endpoint + ":" + port;
}
}
final AzureWebPubSubServiceRestAPIImplBuilder innerBuilder = new AzureWebPubSubServiceRestAPIImplBuilder();
if (endpoint == null || endpoint.isEmpty()) {
logger.logThrowableAsError(
new IllegalStateException("endpoint is not valid - it must be non-null and non-empty."));
}
innerBuilder.host(endpoint);
final WebPubSubServiceVersion serviceVersion =
version != null ? version : WebPubSubServiceVersion.getLatest();
WebPubSubAuthenticationPolicy webPubSubAuthPolicy = new WebPubSubAuthenticationPolicy(credential);
if (pipeline != null) {
innerBuilder.pipeline(pipeline);
return buildAsyncClient(innerBuilder, hub, endpoint, webPubSubAuthPolicy, serviceVersion);
}
if (credential == null) {
logger.logThrowableAsError(
new IllegalStateException("No credential has been specified - it must be non-null and non-empty."));
}
final Configuration buildConfiguration =
(configuration == null) ? Configuration.getGlobalConfiguration().clone() : configuration;
final String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
final String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
String applicationId =
clientOptions == null ? httpLogOptions.getApplicationId() : clientOptions.getApplicationId();
final List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new UserAgentPolicy(applicationId, clientName, clientVersion,
buildConfiguration));
policies.add(new CookiePolicy());
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(retryPolicy == null ? DEFAULT_RETRY_POLICY : retryPolicy);
policies.add(webPubSubAuthPolicy);
policies.addAll(this.policies);
if (clientOptions != null) {
List<HttpHeader> httpHeaderList = new ArrayList<>();
clientOptions.getHeaders().forEach(header ->
httpHeaderList.add(new HttpHeader(header.getName(), header.getValue())));
policies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList)));
}
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
innerBuilder.pipeline(new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.httpClient(httpClient)
.build());
return buildAsyncClient(innerBuilder, hub, endpoint, webPubSubAuthPolicy, serviceVersion);
} | this.endpoint = this.endpoint + ":" + port; | public WebPubSubAsyncServiceClient buildAsyncClient() {
if (hub == null || hub.isEmpty()) {
logger.logThrowableAsError(
new IllegalStateException("hub is not valid - it must be non-null and non-empty."));
}
if (endpoint == null && credential == null) {
final Map<String, String> csParams = parseConnectionString(connectionString);
if (!csParams.containsKey("endpoint") && !csParams.containsKey("accesskey")) {
logger.logThrowableAsError(new IllegalArgumentException(
"Connection string does not contain required 'endpoint' and 'accesskey' values"));
}
final String accessKey = csParams.get("accesskey");
this.credential = new AzureKeyCredential(accessKey);
String csEndpoint = csParams.get("endpoint");
URL url;
try {
url = new URL(csEndpoint);
this.endpoint = csEndpoint;
} catch (MalformedURLException e) {
throw logger.logExceptionAsWarning(new IllegalArgumentException("Connection string contains invalid "
+ "endpoint", e));
}
String port = csParams.get("port");
if (!CoreUtils.isNullOrEmpty(port)) {
this.endpoint = UrlBuilder.parse(url).setPort(port).toString();
}
}
final AzureWebPubSubServiceRestAPIImplBuilder innerBuilder = new AzureWebPubSubServiceRestAPIImplBuilder();
if (endpoint == null || endpoint.isEmpty()) {
logger.logThrowableAsError(
new IllegalStateException("endpoint is not valid - it must be non-null and non-empty."));
}
innerBuilder.host(endpoint);
final WebPubSubServiceVersion serviceVersion =
version != null ? version : WebPubSubServiceVersion.getLatest();
WebPubSubAuthenticationPolicy webPubSubAuthPolicy = new WebPubSubAuthenticationPolicy(credential);
if (pipeline != null) {
innerBuilder.pipeline(pipeline);
return buildAsyncClient(innerBuilder, hub, endpoint, webPubSubAuthPolicy, serviceVersion);
}
if (credential == null) {
logger.logThrowableAsError(
new IllegalStateException("No credential has been specified - it must be non-null and non-empty."));
}
final Configuration buildConfiguration =
(configuration == null) ? Configuration.getGlobalConfiguration().clone() : configuration;
final String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
final String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
String applicationId =
clientOptions == null ? httpLogOptions.getApplicationId() : clientOptions.getApplicationId();
final List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new UserAgentPolicy(applicationId, clientName, clientVersion,
buildConfiguration));
policies.add(new CookiePolicy());
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(retryPolicy == null ? DEFAULT_RETRY_POLICY : retryPolicy);
policies.add(webPubSubAuthPolicy);
policies.addAll(this.policies);
if (clientOptions != null) {
List<HttpHeader> httpHeaderList = new ArrayList<>();
clientOptions.getHeaders().forEach(header ->
httpHeaderList.add(new HttpHeader(header.getName(), header.getValue())));
policies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList)));
}
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
innerBuilder.pipeline(new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.httpClient(httpClient)
.build());
return buildAsyncClient(innerBuilder, hub, endpoint, webPubSubAuthPolicy, serviceVersion);
} | class WebPubSubClientBuilder {
private final ClientLogger logger = new ClientLogger(WebPubSubClientBuilder.class);
private static final String WEBPUBSUB_PROPERTIES = "azure-messaging-webpubsub.properties";
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private static final HttpPipelinePolicy DEFAULT_RETRY_POLICY = new RetryPolicy();
private final Map<String, String> properties;
private final List<HttpPipelinePolicy> policies;
private String connectionString;
private String endpoint;
private AzureKeyCredential credential;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private HttpPipeline pipeline;
private RetryPolicy retryPolicy;
private Configuration configuration;
private WebPubSubServiceVersion version;
private String hub;
private ClientOptions clientOptions;
/**
* Creates a new builder instance with all values set to their default value.
*/
/**
 * Creates a new builder instance with all values set to their default value.
 */
public WebPubSubClientBuilder() {
    // Independent default initialisations; order is not significant.
    properties = CoreUtils.getProperties(WEBPUBSUB_PROPERTIES);
    httpLogOptions = new HttpLogOptions();
    policies = new ArrayList<>();
}
/**
* Sets the {@link ClientOptions} which enables various options to be set on the client. For example setting an
* {@code applicationId} using {@link ClientOptions
* the {@link UserAgentPolicy} for telemetry/monitoring purposes.
*
* <p>More About <a href="https:
*
* @param clientOptions the {@link ClientOptions} to be set on the client.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder clientOptions(ClientOptions clientOptions) {
    // Stored as-is; applicationId and headers are consumed later in buildAsyncClient().
    this.clientOptions = clientOptions;
    return this;
}
/**
* Sets the credential to use when authenticating HTTP requests.
*
* @param connectionString Connection string in the format "endpoint={endpoint_value};accesskey={accesskey_value}"
* @return The updated WebPubSubClientBuilder object.
* @throws NullPointerException If {@code connectionString} is {@code null}.
*/
public WebPubSubClientBuilder connectionString(final String connectionString) {
    // Fail fast on null; the string is only parsed later, in buildAsyncClient().
    Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
    this.connectionString = connectionString;
    return this;
}
/**
* Sets the service endpoint for the Azure Web Pub Sub instance.
*
* @param endpoint The URL of the Azure Web Pub Sub instance to send service requests to, and receive responses
* from.
* @return The updated WebPubSubClientBuilder object.
* @throws IllegalArgumentException if {@code endpoint} is {@code null}.
*/
public WebPubSubClientBuilder endpoint(final String endpoint) {
    // NOTE(review): the Javadoc above advertises IllegalArgumentException for a
    // null endpoint, but requireNonNull throws NullPointerException — confirm
    // which contract is intended.
    Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
    this.endpoint = endpoint;
    return this;
}
/**
* Sets the {@link AzureKeyCredential} used to authenticate HTTP requests.
*
* @param credential AzureKeyCredential used to authenticate HTTP requests.
* @return The updated WebPubSubClientBuilder object.
* @throws NullPointerException If {@code credential} is {@code null}.
*/
public WebPubSubClientBuilder credential(final AzureKeyCredential credential) {
    // Fail fast on null; the credential feeds the auth policy in buildAsyncClient().
    Objects.requireNonNull(credential, "'credential' cannot be null.");
    this.credential = credential;
    return this;
}
/**
* Target hub name, which should start with alphabetic characters and only contain alpha-numeric characters or
* underscore.
*
* @param hub Target hub name, which should start with alphabetic characters and only contain alpha-numeric
* characters or underscore.
* @return The updated WebPubSubClientBuilder object.
* @throws NullPointerException If {@code hub} is {@code null}.
*/
public WebPubSubClientBuilder hub(final String hub) {
    // Only nullness is validated here; the character-set/emptiness constraints
    // described in the Javadoc are checked later (buildAsyncClient rejects empty).
    Objects.requireNonNull(hub, "'hub' cannot be null.");
    this.hub = hub;
    return this;
}
/**
* Sets the logging configuration for HTTP requests and responses.
*
* <p> If logLevel is not provided, default value of {@link com.azure.core.http.policy.HttpLogDetailLevel
* set.</p>
*
* @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder httpLogOptions(final HttpLogOptions logOptions) {
httpLogOptions = logOptions;
return this;
}
/**
* Adds a policy to the set of existing policies that are executed after required policies.
*
* @param policy The retry policy for service requests.
* @return The updated WebPubSubClientBuilder object.
* @throws NullPointerException If {@code policy} is {@code null}.
*/
public WebPubSubClientBuilder addPolicy(final HttpPipelinePolicy policy) {
Objects.requireNonNull(policy);
policies.add(policy);
return this;
}
/**
* Sets the HTTP client to use for sending and receiving requests to and from the service.
*
* @param client The HTTP client to use for requests.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder httpClient(final HttpClient client) {
if (this.httpClient != null && client == null) {
logger.info("HttpClient is being set to 'null' when it was previously configured.");
}
this.httpClient = client;
return this;
}
/**
* Sets the HTTP pipeline to use for the service client.
*
* If {@code pipeline} is set, all other settings are ignored, aside from
* {@link WebPubSubClientBuilder
* {@link WebPubSubServiceClient}.
*
* @param pipeline The HTTP pipeline to use for sending service requests and receiving responses.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder pipeline(final HttpPipeline pipeline) {
if (this.pipeline != null && pipeline == null) {
logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
}
this.pipeline = pipeline;
return this;
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder configuration(final Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Sets the {@link HttpPipelinePolicy} that is used when each request is sent. The default retry policy will be
* used if not provided.
*
* @param retryPolicy user's retry policy applied to each request.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder retryPolicy(final RetryPolicy retryPolicy) {
this.retryPolicy = retryPolicy;
return this;
}
/**
* Sets the {@link WebPubSubServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version the client library will have the result of potentially moving to a newer service version.
*
* @param version {@link WebPubSubServiceVersion} of the service to be used when making requests.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder serviceVersion(final WebPubSubServiceVersion version) {
this.version = version;
return this;
}
/**
* Builds an instance of WebPubSubAsyncServiceClient with the provided parameters.
*
* @return an instance of WebPubSubAsyncServiceClient.
*/
private WebPubSubAsyncServiceClient buildAsyncClient(final AzureWebPubSubServiceRestAPIImplBuilder innerBuilder,
final String hub,
final String endpoint,
final WebPubSubAuthenticationPolicy webPubSubAuthPolicy,
final WebPubSubServiceVersion serviceVersion) {
return new WebPubSubAsyncServiceClient(
innerBuilder.buildClient().getWebPubSubs(), hub, endpoint, webPubSubAuthPolicy, serviceVersion);
}
/**
* Builds an instance of WebPubSubServiceClient with the provided parameters.
*
* @return an instance of WebPubSubServiceClient.
*/
public WebPubSubServiceClient buildClient() {
return new WebPubSubServiceClient(buildAsyncClient());
}
private Map<String, String> parseConnectionString(final String cs) {
final String[] params = cs.split(";");
final Map<String, String> connectionStringParams = new HashMap<>();
for (final String param : params) {
final String[] paramSplit = param.split("=", 2);
if (paramSplit.length != 2) {
continue;
}
final String key = paramSplit[0].trim().toLowerCase(Locale.ROOT);
if (connectionStringParams.containsKey(key)) {
logger.logThrowableAsError(new IllegalArgumentException(
"Duplicate connection string key parameter provided for key '" + key + "'"));
}
final String value = paramSplit[1].trim();
connectionStringParams.put(key, value);
}
return connectionStringParams;
}
} | class WebPubSubClientBuilder {
private final ClientLogger logger = new ClientLogger(WebPubSubClientBuilder.class);
private static final String WEBPUBSUB_PROPERTIES = "azure-messaging-webpubsub.properties";
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private static final HttpPipelinePolicy DEFAULT_RETRY_POLICY = new RetryPolicy();
private final Map<String, String> properties;
private final List<HttpPipelinePolicy> policies;
private String connectionString;
private String endpoint;
private AzureKeyCredential credential;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private HttpPipeline pipeline;
private RetryPolicy retryPolicy;
private Configuration configuration;
private WebPubSubServiceVersion version;
private String hub;
private ClientOptions clientOptions;
/**
* Creates a new builder instance with all values set to their default value.
*/
public WebPubSubClientBuilder() {
policies = new ArrayList<>();
httpLogOptions = new HttpLogOptions();
properties = CoreUtils.getProperties(WEBPUBSUB_PROPERTIES);
}
/**
* Sets the {@link ClientOptions} which enables various options to be set on the client. For example setting an
* {@code applicationId} using {@link ClientOptions
* the {@link UserAgentPolicy} for telemetry/monitoring purposes.
*
* <p>More About <a href="https:
*
* @param clientOptions the {@link ClientOptions} to be set on the client.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder clientOptions(ClientOptions clientOptions) {
this.clientOptions = clientOptions;
return this;
}
/**
* Sets the credential to use when authenticating HTTP requests.
*
* @param connectionString Connection string in the format "endpoint={endpoint_value};accesskey={accesskey_value}"
* @return The updated WebPubSubClientBuilder object.
* @throws NullPointerException If {@code connectionString} is {@code null}.
*/
public WebPubSubClientBuilder connectionString(final String connectionString) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
this.connectionString = connectionString;
return this;
}
/**
* Sets the service endpoint for the Azure Web Pub Sub instance.
*
* @param endpoint The URL of the Azure Web Pub Sub instance to send service requests to, and receive responses
* from.
* @return The updated WebPubSubClientBuilder object.
* @throws IllegalArgumentException if {@code endpoint} is {@code null}.
*/
public WebPubSubClientBuilder endpoint(final String endpoint) {
Objects.requireNonNull(endpoint, "'endpoint' cannot be null.");
try {
new URL(endpoint);
} catch (MalformedURLException e) {
throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be valid URL", e));
}
this.endpoint = endpoint;
return this;
}
/**
* Sets the {@link AzureKeyCredential} used to authenticate HTTP requests.
*
* @param credential AzureKeyCredential used to authenticate HTTP requests.
* @return The updated WebPubSubClientBuilder object.
* @throws NullPointerException If {@code credential} is {@code null}.
*/
public WebPubSubClientBuilder credential(final AzureKeyCredential credential) {
Objects.requireNonNull(credential, "'credential' cannot be null.");
this.credential = credential;
return this;
}
/**
* Target hub name, which should start with alphabetic characters and only contain alpha-numeric characters or
* underscore.
*
* @param hub Target hub name, which should start with alphabetic characters and only contain alpha-numeric
* characters or underscore.
* @return The updated WebPubSubClientBuilder object.
* @throws NullPointerException If {@code hub} is {@code null}.
*/
public WebPubSubClientBuilder hub(final String hub) {
Objects.requireNonNull(hub, "'hub' cannot be null.");
this.hub = hub;
return this;
}
/**
* Sets the logging configuration for HTTP requests and responses.
*
* <p> If logLevel is not provided, default value of {@link com.azure.core.http.policy.HttpLogDetailLevel
* set.</p>
*
* @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder httpLogOptions(final HttpLogOptions logOptions) {
httpLogOptions = logOptions;
return this;
}
/**
* Adds a policy to the set of existing policies that are executed after required policies.
*
* @param policy The retry policy for service requests.
* @return The updated WebPubSubClientBuilder object.
* @throws NullPointerException If {@code policy} is {@code null}.
*/
public WebPubSubClientBuilder addPolicy(final HttpPipelinePolicy policy) {
Objects.requireNonNull(policy);
policies.add(policy);
return this;
}
/**
* Sets the HTTP client to use for sending and receiving requests to and from the service.
*
* @param client The HTTP client to use for requests.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder httpClient(final HttpClient client) {
if (this.httpClient != null && client == null) {
logger.info("HttpClient is being set to 'null' when it was previously configured.");
}
this.httpClient = client;
return this;
}
/**
* Sets the HTTP pipeline to use for the service client.
*
* If {@code pipeline} is set, all other settings are ignored, aside from
* {@link WebPubSubClientBuilder
* {@link WebPubSubServiceClient}.
*
* @param pipeline The HTTP pipeline to use for sending service requests and receiving responses.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder pipeline(final HttpPipeline pipeline) {
if (this.pipeline != null && pipeline == null) {
logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
}
this.pipeline = pipeline;
return this;
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder configuration(final Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Sets the {@link HttpPipelinePolicy} that is used when each request is sent. The default retry policy will be
* used if not provided.
*
* @param retryPolicy user's retry policy applied to each request.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder retryPolicy(final RetryPolicy retryPolicy) {
this.retryPolicy = retryPolicy;
return this;
}
/**
* Sets the {@link WebPubSubServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version the client library will have the result of potentially moving to a newer service version.
*
* @param version {@link WebPubSubServiceVersion} of the service to be used when making requests.
* @return The updated WebPubSubClientBuilder object.
*/
public WebPubSubClientBuilder serviceVersion(final WebPubSubServiceVersion version) {
this.version = version;
return this;
}
/**
* Builds an instance of WebPubSubAsyncServiceClient with the provided parameters.
*
* @return an instance of WebPubSubAsyncServiceClient.
*/
private WebPubSubAsyncServiceClient buildAsyncClient(final AzureWebPubSubServiceRestAPIImplBuilder innerBuilder,
final String hub,
final String endpoint,
final WebPubSubAuthenticationPolicy webPubSubAuthPolicy,
final WebPubSubServiceVersion serviceVersion) {
return new WebPubSubAsyncServiceClient(
innerBuilder.buildClient().getWebPubSubs(), hub, endpoint, webPubSubAuthPolicy, serviceVersion);
}
/**
* Builds an instance of WebPubSubServiceClient with the provided parameters.
*
* @return an instance of WebPubSubServiceClient.
*/
public WebPubSubServiceClient buildClient() {
return new WebPubSubServiceClient(buildAsyncClient());
}
private Map<String, String> parseConnectionString(final String cs) {
final String[] params = cs.split(";");
final Map<String, String> connectionStringParams = new HashMap<>();
for (final String param : params) {
final String[] paramSplit = param.split("=", 2);
if (paramSplit.length != 2) {
continue;
}
final String key = paramSplit[0].trim().toLowerCase(Locale.ROOT);
if (connectionStringParams.containsKey(key)) {
logger.logThrowableAsError(new IllegalArgumentException(
"Duplicate connection string key parameter provided for key '" + key + "'"));
}
final String value = paramSplit[1].trim();
connectionStringParams.put(key, value);
}
return connectionStringParams;
}
} |
@sberyozkin I prefer revert it to be honest :) I should ask your opinion before doing this (rookie mistake)! | private void calculate() {
calculated = true;
remoteAddress = delegate.remoteAddress();
scheme = delegate.scheme();
setHostAndPort(delegate.host(), port);
uri = delegate.uri();
String forwardedSsl = delegate.getHeader(X_FORWARDED_SSL);
boolean isForwardedSslOn = forwardedSsl != null && forwardedSsl.equalsIgnoreCase("on");
if (forwardingProxyOptions.allowXForwarded) {
String protocolHeader = delegate.getHeader(X_FORWARDED_PROTO);
if (protocolHeader != null) {
scheme = protocolHeader.split(",")[0];
port = -1;
} else if (isForwardedSslOn) {
scheme = HTTPS_SCHEME;
port = -1;
}
if (forwardingProxyOptions.enableForwardedHost) {
String hostHeader = delegate.getHeader(forwardingProxyOptions.forwardedHostHeader);
if (hostHeader != null) {
setHostAndPort(hostHeader.split(",")[0], port);
}
}
if (forwardingProxyOptions.enableForwardedPrefix) {
String prefixHeader = delegate.getHeader(forwardingProxyOptions.forwardedPrefixHeader);
if (prefixHeader != null) {
uri = appendPrefixToUri(prefixHeader, uri);
}
}
String portHeader = delegate.getHeader(X_FORWARDED_PORT);
if (portHeader != null) {
port = parsePort(portHeader.split(",")[0], port);
}
String forHeader = delegate.getHeader(X_FORWARDED_FOR);
if (forHeader != null) {
remoteAddress = parseFor(forHeader.split(",")[0], remoteAddress.port());
}
}
String forwarded = delegate.getHeader(FORWARDED);
if (forwardingProxyOptions.allowForwarded && forwarded != null) {
String forwardedToUse = forwarded.split(",")[0];
Matcher matcher = FORWARDED_PROTO_PATTERN.matcher(forwardedToUse);
if (matcher.find()) {
scheme = (matcher.group(1).trim());
port = -1;
} else if (isForwardedSslOn) {
scheme = HTTPS_SCHEME;
port = -1;
}
matcher = FORWARDED_HOST_PATTERN.matcher(forwardedToUse);
if (matcher.find()) {
setHostAndPort(matcher.group(1).trim(), port);
}
matcher = FORWARDED_FOR_PATTERN.matcher(forwardedToUse);
if (matcher.find()) {
remoteAddress = parseFor(matcher.group(1).trim(), remoteAddress.port());
}
}
if (((scheme.equals(HTTP_SCHEME) && port == 80) || (scheme.equals(HTTPS_SCHEME) && port == 443))) {
port = -1;
}
host = host + (port >= 0 ? ":" + port : "");
delegate.headers().set(HttpHeaders.HOST, host);
absoluteURI = scheme + ":
log.debug("Recalculated absoluteURI to " + absoluteURI);
} | String forwardedToUse = forwarded.split(",")[0]; | private void calculate() {
calculated = true;
remoteAddress = delegate.remoteAddress();
scheme = delegate.scheme();
setHostAndPort(delegate.host(), port);
uri = delegate.uri();
String forwarded = delegate.getHeader(FORWARDED);
if (forwardingProxyOptions.allowForwarded && forwarded != null) {
String forwardedToUse = forwarded.split(",")[0];
Matcher matcher = FORWARDED_PROTO_PATTERN.matcher(forwardedToUse);
if (matcher.find()) {
scheme = (matcher.group(1).trim());
port = -1;
}
matcher = FORWARDED_HOST_PATTERN.matcher(forwardedToUse);
if (matcher.find()) {
setHostAndPort(matcher.group(1).trim(), port);
}
matcher = FORWARDED_FOR_PATTERN.matcher(forwardedToUse);
if (matcher.find()) {
remoteAddress = parseFor(matcher.group(1).trim(), remoteAddress.port());
}
} else if (forwardingProxyOptions.allowXForwarded) {
String protocolHeader = delegate.getHeader(X_FORWARDED_PROTO);
if (protocolHeader != null) {
scheme = getFirstElement(protocolHeader);
port = -1;
}
String forwardedSsl = delegate.getHeader(X_FORWARDED_SSL);
boolean isForwardedSslOn = forwardedSsl != null && forwardedSsl.equalsIgnoreCase("on");
if (isForwardedSslOn) {
scheme = HTTPS_SCHEME;
port = -1;
}
if (forwardingProxyOptions.enableForwardedHost) {
String hostHeader = delegate.getHeader(forwardingProxyOptions.forwardedHostHeader);
if (hostHeader != null) {
setHostAndPort(getFirstElement(hostHeader), port);
}
}
if (forwardingProxyOptions.enableForwardedPrefix) {
String prefixHeader = delegate.getHeader(forwardingProxyOptions.forwardedPrefixHeader);
if (prefixHeader != null) {
uri = appendPrefixToUri(prefixHeader, uri);
}
}
String portHeader = delegate.getHeader(X_FORWARDED_PORT);
if (portHeader != null) {
port = parsePort(getFirstElement(portHeader), port);
}
String forHeader = delegate.getHeader(X_FORWARDED_FOR);
if (forHeader != null) {
remoteAddress = parseFor(getFirstElement(forHeader), remoteAddress.port());
}
}
if (((scheme.equals(HTTP_SCHEME) && port == 80) || (scheme.equals(HTTPS_SCHEME) && port == 443))) {
port = -1;
}
host = host + (port >= 0 ? ":" + port : "");
delegate.headers().set(HttpHeaders.HOST, host);
absoluteURI = scheme + ":
log.debug("Recalculated absoluteURI to " + absoluteURI);
} | class ForwardedParser {
private static final Logger log = Logger.getLogger(ForwardedParser.class);
private static final String HTTP_SCHEME = "http";
private static final String HTTPS_SCHEME = "https";
private static final AsciiString FORWARDED = AsciiString.cached("Forwarded");
private static final AsciiString X_FORWARDED_SSL = AsciiString.cached("X-Forwarded-Ssl");
private static final AsciiString X_FORWARDED_PROTO = AsciiString.cached("X-Forwarded-Proto");
private static final AsciiString X_FORWARDED_PORT = AsciiString.cached("X-Forwarded-Port");
private static final AsciiString X_FORWARDED_FOR = AsciiString.cached("X-Forwarded-For");
private static final Pattern FORWARDED_HOST_PATTERN = Pattern.compile("host=\"?([^;,\"]+)\"?");
private static final Pattern FORWARDED_PROTO_PATTERN = Pattern.compile("proto=\"?([^;,\"]+)\"?");
private static final Pattern FORWARDED_FOR_PATTERN = Pattern.compile("for=\"?([^;,\"]+)\"?");
private final HttpServerRequest delegate;
private final ForwardingProxyOptions forwardingProxyOptions;
private boolean calculated;
private String host;
private int port = -1;
private String scheme;
private String uri;
private String absoluteURI;
private SocketAddress remoteAddress;
ForwardedParser(HttpServerRequest delegate, ForwardingProxyOptions forwardingProxyOptions) {
this.delegate = delegate;
this.forwardingProxyOptions = forwardingProxyOptions;
}
public String scheme() {
if (!calculated)
calculate();
return scheme;
}
String host() {
if (!calculated)
calculate();
return host;
}
boolean isSSL() {
if (!calculated)
calculate();
return scheme.equals(HTTPS_SCHEME);
}
String absoluteURI() {
if (!calculated)
calculate();
return absoluteURI;
}
SocketAddress remoteAddress() {
if (!calculated)
calculate();
return remoteAddress;
}
String uri() {
if (!calculated)
calculate();
return uri;
}
private void setHostAndPort(String hostToParse, int defaultPort) {
if (hostToParse == null) {
hostToParse = "";
}
String[] hostAndPort = parseHostAndPort(hostToParse);
host = hostAndPort[0];
delegate.headers().set(HttpHeaders.HOST, host);
port = parsePort(hostAndPort[1], defaultPort);
}
private SocketAddress parseFor(String forToParse, int defaultPort) {
String[] hostAndPort = parseHostAndPort(forToParse);
String host = hostAndPort[0];
int port = parsePort(hostAndPort[1], defaultPort);
return new SocketAddressImpl(port, host);
}
/**
* Returns a String[] of 2 elements, with the first being the host and the second the port
*/
private String[] parseHostAndPort(String hostToParse) {
String[] hostAndPort = { hostToParse, "" };
int portSeparatorIdx = hostToParse.lastIndexOf(':');
int squareBracketIdx = hostToParse.lastIndexOf(']');
if ((squareBracketIdx > -1 && portSeparatorIdx > squareBracketIdx)) {
hostAndPort[0] = hostToParse.substring(0, portSeparatorIdx);
hostAndPort[1] = hostToParse.substring(portSeparatorIdx + 1);
} else {
long numberOfColons = hostToParse.chars().filter(ch -> ch == ':').count();
if (numberOfColons == 1 && !hostToParse.endsWith(":")) {
hostAndPort[0] = hostToParse.substring(0, portSeparatorIdx);
hostAndPort[1] = hostToParse.substring(portSeparatorIdx + 1);
}
}
return hostAndPort;
}
private int parsePort(String portToParse, int defaultPort) {
if (portToParse != null && portToParse.length() > 0) {
try {
return Integer.parseInt(portToParse);
} catch (NumberFormatException ignored) {
log.error("Failed to parse a port from \"forwarded\"-type headers.");
}
}
return defaultPort;
}
private String appendPrefixToUri(String prefix, String uri) {
String parsed = stripSlashes(prefix);
return parsed.isEmpty() ? uri : '/' + parsed + uri;
}
private String stripSlashes(String uri) {
String result;
if (!uri.isEmpty()) {
int beginIndex = 0;
if (uri.startsWith("/")) {
beginIndex = 1;
}
int endIndex = uri.length();
if (uri.endsWith("/") && uri.length() > 1) {
endIndex = uri.length() - 1;
}
result = uri.substring(beginIndex, endIndex);
} else {
result = uri;
}
return result;
}
} | class ForwardedParser {
private static final Logger log = Logger.getLogger(ForwardedParser.class);
private static final String HTTP_SCHEME = "http";
private static final String HTTPS_SCHEME = "https";
private static final AsciiString FORWARDED = AsciiString.cached("Forwarded");
private static final AsciiString X_FORWARDED_SSL = AsciiString.cached("X-Forwarded-Ssl");
private static final AsciiString X_FORWARDED_PROTO = AsciiString.cached("X-Forwarded-Proto");
private static final AsciiString X_FORWARDED_PORT = AsciiString.cached("X-Forwarded-Port");
private static final AsciiString X_FORWARDED_FOR = AsciiString.cached("X-Forwarded-For");
private static final Pattern FORWARDED_HOST_PATTERN = Pattern.compile("host=\"?([^;,\"]+)\"?");
private static final Pattern FORWARDED_PROTO_PATTERN = Pattern.compile("proto=\"?([^;,\"]+)\"?");
private static final Pattern FORWARDED_FOR_PATTERN = Pattern.compile("for=\"?([^;,\"]+)\"?");
private final HttpServerRequest delegate;
private final ForwardingProxyOptions forwardingProxyOptions;
private boolean calculated;
private String host;
private int port = -1;
private String scheme;
private String uri;
private String absoluteURI;
private SocketAddress remoteAddress;
ForwardedParser(HttpServerRequest delegate, ForwardingProxyOptions forwardingProxyOptions) {
this.delegate = delegate;
this.forwardingProxyOptions = forwardingProxyOptions;
}
public String scheme() {
if (!calculated)
calculate();
return scheme;
}
String host() {
if (!calculated)
calculate();
return host;
}
boolean isSSL() {
if (!calculated)
calculate();
return scheme.equals(HTTPS_SCHEME);
}
String absoluteURI() {
if (!calculated)
calculate();
return absoluteURI;
}
SocketAddress remoteAddress() {
if (!calculated)
calculate();
return remoteAddress;
}
String uri() {
if (!calculated)
calculate();
return uri;
}
private void setHostAndPort(String hostToParse, int defaultPort) {
if (hostToParse == null) {
hostToParse = "";
}
String[] hostAndPort = parseHostAndPort(hostToParse);
host = hostAndPort[0];
delegate.headers().set(HttpHeaders.HOST, host);
port = parsePort(hostAndPort[1], defaultPort);
}
private SocketAddress parseFor(String forToParse, int defaultPort) {
String[] hostAndPort = parseHostAndPort(forToParse);
String host = hostAndPort[0];
int port = parsePort(hostAndPort[1], defaultPort);
return new SocketAddressImpl(port, host);
}
private String getFirstElement(String value) {
int index = value.indexOf(',');
return index == -1 ? value : value.substring(0, index);
}
/**
* Returns a String[] of 2 elements, with the first being the host and the second the port
*/
private String[] parseHostAndPort(String hostToParse) {
String[] hostAndPort = { hostToParse, "" };
int portSeparatorIdx = hostToParse.lastIndexOf(':');
int squareBracketIdx = hostToParse.lastIndexOf(']');
if ((squareBracketIdx > -1 && portSeparatorIdx > squareBracketIdx)) {
hostAndPort[0] = hostToParse.substring(0, portSeparatorIdx);
hostAndPort[1] = hostToParse.substring(portSeparatorIdx + 1);
} else {
long numberOfColons = hostToParse.chars().filter(ch -> ch == ':').count();
if (numberOfColons == 1 && !hostToParse.endsWith(":")) {
hostAndPort[0] = hostToParse.substring(0, portSeparatorIdx);
hostAndPort[1] = hostToParse.substring(portSeparatorIdx + 1);
}
}
return hostAndPort;
}
private int parsePort(String portToParse, int defaultPort) {
if (portToParse != null && portToParse.length() > 0) {
try {
return Integer.parseInt(portToParse);
} catch (NumberFormatException ignored) {
log.error("Failed to parse a port from \"forwarded\"-type headers.");
}
}
return defaultPort;
}
private String appendPrefixToUri(String prefix, String uri) {
String parsed = stripSlashes(prefix);
return parsed.isEmpty() ? uri : '/' + parsed + uri;
}
private String stripSlashes(String uri) {
String result;
if (!uri.isEmpty()) {
int beginIndex = 0;
if (uri.startsWith("/")) {
beginIndex = 1;
}
int endIndex = uri.length();
if (uri.endsWith("/") && uri.length() > 1) {
endIndex = uri.length() - 1;
}
result = uri.substring(beginIndex, endIndex);
} else {
result = uri;
}
return result;
}
} |
Will a warning message be enough here? Should we reject the job? (I should comment on this early. Sorry that I forgot to do so : ( ) | void maybeRecordPCollectionWithAutoSharding(PCollection<?> pcol) {
if (!options.isEnableStreamingEngine()) {
LOG.warn(
"Runner determined sharding not available in Dataflow for GroupIntoBatches for "
+ "non-Streaming Engine jobs. Default sharding will be applied.");
return;
}
if (!hasExperiment(options, "enable_streaming_auto_sharding")) {
LOG.warn(
"Runner determined sharding not enabled in Dataflow for GroupIntoBatches for Streaming "
+ "Engine jobs: --enable_streaming_auto_sharding=false. Default sharding will be "
+ "applied.");
return;
}
if (hasExperiment(options, "beam_fn_api") && !hasExperiment(options, "use_runner_v2")) {
LOG.warn(
"Runner determined sharding not available in Dataflow for GroupIntoBatches for portable "
+ "jobs not using runner v2: --beam_fn_api=true, --use_runner_v2=false. Default "
+ "sharding will be applied.");
return;
}
pcollectionsRequiringAutoSharding.add(pcol);
} | return; | void maybeRecordPCollectionWithAutoSharding(PCollection<?> pcol) {
checkArgument(
options.isEnableStreamingEngine(),
"Runner determined sharding not available in Dataflow for GroupIntoBatches for"
+ " non-Streaming-Engine jobs.");
checkArgument(
hasExperiment(options, "enable_streaming_auto_sharding"),
"Runner determined sharding not enabled in Dataflow for GroupIntoBatches."
+ " Try adding the experiment: --experiments=enable_streaming_auto_sharding.");
boolean useJRH = hasExperiment(options, "beam_fn_api") && !useUnifiedWorker(options);
checkArgument(
!useJRH,
"Runner determined sharding not available in Dataflow for GroupIntoBatches for portable"
+ " jobs not using runner v2. Try adding the experiment --experiments=use_runner_v2.");
pcollectionsRequiringAutoSharding.add(pcol);
} | class BoundednessVisitor extends PipelineVisitor.Defaults {
IsBounded boundedness = IsBounded.BOUNDED;
@Override
public void visitValue(PValue value, Node producer) {
if (value instanceof PCollection) {
boundedness = boundedness.and(((PCollection) value).isBounded());
}
}
} | class BoundednessVisitor extends PipelineVisitor.Defaults {
IsBounded boundedness = IsBounded.BOUNDED;
@Override
public void visitValue(PValue value, Node producer) {
if (value instanceof PCollection) {
boundedness = boundedness.and(((PCollection) value).isBounded());
}
}
} |
That doesn't matter I think | void execBatchPlanFragment(TExecPlanFragmentParams commonParams, TExecPlanFragmentParams uniqueParams) {
TReportExecStatusParams report = new TReportExecStatusParams();
report.setProtocol_version(FrontendServiceVersion.V1);
report.setQuery_id(commonParams.params.query_id);
report.setBackend_num(uniqueParams.backend_num);
report.setBackend_id(be.getId());
report.setFragment_instance_id(uniqueParams.params.fragment_instance_id);
report.setDone(true);
report.setStatus(new TStatus(TStatusCode.OK));
for (TPlanNode planNode : commonParams.fragment.plan.nodes) {
if (planNode.node_type == TPlanNodeType.OLAP_SCAN_NODE) {
runOlapScan(report, planNode, uniqueParams.params.per_node_scan_ranges.get(planNode.getNode_id()));
}
}
if (uniqueParams.fragment.output_sink != null) {
TDataSink tDataSink = uniqueParams.fragment.output_sink;
runSink(report, tDataSink);
} else if (commonParams.fragment.output_sink != null) {
TDataSink tDataSink = commonParams.fragment.output_sink;
runSink(report, tDataSink);
}
try {
TReportExecStatusResult ret = frontendService.reportExecStatus(report);
if (ret.status.status_code != TStatusCode.OK) {
LOG.warn("error report exec status " + (ret.status.error_msgs.isEmpty() ? "" : ret.status.error_msgs.get(0)));
}
} catch (TException e) {
LOG.warn("error report exec status", e);
}
} | if (planNode.node_type == TPlanNodeType.OLAP_SCAN_NODE) { | void execBatchPlanFragment(TExecPlanFragmentParams commonParams, TExecPlanFragmentParams uniqueParams) {
TReportExecStatusParams report = new TReportExecStatusParams();
report.setProtocol_version(FrontendServiceVersion.V1);
report.setQuery_id(commonParams.params.query_id);
report.setBackend_num(uniqueParams.backend_num);
report.setBackend_id(be.getId());
report.setFragment_instance_id(uniqueParams.params.fragment_instance_id);
report.setDone(true);
report.setStatus(new TStatus(TStatusCode.OK));
for (TPlanNode planNode : commonParams.fragment.plan.nodes) {
if (planNode.node_type == TPlanNodeType.OLAP_SCAN_NODE) {
runOlapScan(report, planNode, uniqueParams.params.per_node_scan_ranges.get(planNode.getNode_id()));
}
}
if (uniqueParams.fragment.output_sink != null) {
TDataSink tDataSink = uniqueParams.fragment.output_sink;
runSink(report, tDataSink);
} else if (commonParams.fragment.output_sink != null) {
TDataSink tDataSink = commonParams.fragment.output_sink;
runSink(report, tDataSink);
}
try {
TReportExecStatusResult ret = frontendService.reportExecStatus(report);
if (ret.status.status_code != TStatusCode.OK) {
LOG.warn("error report exec status " + (ret.status.error_msgs.isEmpty() ? "" : ret.status.error_msgs.get(0)));
}
} catch (TException e) {
LOG.warn("error report exec status", e);
}
} | class PseudoPBackendService implements PBackendServiceAsync {
private final ExecutorService executor = Executors.newSingleThreadExecutor();
PseudoPBackendService() {
executor.submit(() -> {
currentBackend.set(PseudoBackend.this);
});
}
@Override
public PExecPlanFragmentResult execPlanFragment(PExecPlanFragmentRequest request) {
PExecPlanFragmentResult result = new PExecPlanFragmentResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
result.status = pStatus;
return result;
}
@Override
public PExecBatchPlanFragmentsResult execBatchPlanFragments(PExecBatchPlanFragmentsRequest request) {
PExecBatchPlanFragmentsResult result = new PExecBatchPlanFragmentsResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
result.status = pStatus;
return result;
}
@Override
public PCancelPlanFragmentResult cancelPlanFragment(PCancelPlanFragmentRequest request) {
PCancelPlanFragmentResult result = new PCancelPlanFragmentResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
result.status = pStatus;
return result;
}
@Override
public PFetchDataResult fetchData(PFetchDataRequest request) {
PFetchDataResult result = new PFetchDataResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
PQueryStatistics pQueryStatistics = new PQueryStatistics();
pQueryStatistics.scanRows = 0L;
pQueryStatistics.scanBytes = 0L;
pQueryStatistics.cpuCostNs = 0L;
pQueryStatistics.memCostBytes = 0L;
result.status = pStatus;
result.packetSeq = 0L;
result.queryStatistics = pQueryStatistics;
result.eos = true;
return result;
}
@Override
public PTriggerProfileReportResult triggerProfileReport(PTriggerProfileReportRequest request) {
return null;
}
@Override
public PProxyResult getInfo(PProxyRequest request) {
return null;
}
@Override
public Future<PExecPlanFragmentResult> execPlanFragment(
PExecPlanFragmentRequest request, RpcCallback<PExecPlanFragmentResult> callback) {
TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
final TExecPlanFragmentParams params = new TExecPlanFragmentParams();
PExecPlanFragmentResult result = new PExecPlanFragmentResult();
result.status = new StatusPB();
result.status.statusCode = 0;
try {
RpcContext rpcContext = RpcContext.getContext();
ByteBuf buf = rpcContext.getRequestBinaryAttachment();
byte[] serialRequest = new byte[buf.readableBytes()];
buf.readBytes(serialRequest);
deserializer.deserialize(params, serialRequest);
} catch (TException e) {
LOG.warn("error deserialize request", e);
result.status.statusCode = TStatusCode.INTERNAL_ERROR.getValue();
result.status.errorMsgs = Lists.newArrayList(e.getMessage());
return CompletableFuture.completedFuture(result);
}
executor.submit(() -> {
try {
execPlanFragmentWithReport(params);
} catch (Exception e) {
LOG.warn("error execPlanFragment", e);
}
});
return CompletableFuture.completedFuture(result);
}
@Override
public Future<PExecBatchPlanFragmentsResult> execBatchPlanFragments(
PExecBatchPlanFragmentsRequest request, RpcCallback<PExecBatchPlanFragmentsResult> callback) {
TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
final TExecBatchPlanFragmentsParams params = new TExecBatchPlanFragmentsParams();
PExecBatchPlanFragmentsResult result = new PExecBatchPlanFragmentsResult();
result.status = new StatusPB();
result.status.statusCode = 0;
try {
RpcContext rpcContext = RpcContext.getContext();
ByteBuf buf = rpcContext.getRequestBinaryAttachment();
byte[] serialRequest = new byte[buf.readableBytes()];
buf.readBytes(serialRequest);
deserializer.deserialize(params, serialRequest);
} catch (TException e) {
LOG.warn("error deserialize request", e);
result.status.statusCode = TStatusCode.INTERNAL_ERROR.getValue();
result.status.errorMsgs = Lists.newArrayList(e.getMessage());
return CompletableFuture.completedFuture(result);
}
executor.submit(() -> {
try {
execBatchPlanFragmentsWithReport(params);
} catch (Exception e) {
LOG.warn("error execBatchPlanFragments", e);
}
});
return CompletableFuture.completedFuture(result);
}
@Override
public Future<PCancelPlanFragmentResult> cancelPlanFragment(
PCancelPlanFragmentRequest request, RpcCallback<PCancelPlanFragmentResult> callback) {
return executor.submit(() -> {
PCancelPlanFragmentResult result = new PCancelPlanFragmentResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
result.status = pStatus;
return result;
});
}
@Override
public Future<PFetchDataResult> fetchData(PFetchDataRequest request, RpcCallback<PFetchDataResult> callback) {
return executor.submit(() -> {
PFetchDataResult result = new PFetchDataResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
PQueryStatistics pQueryStatistics = new PQueryStatistics();
pQueryStatistics.scanRows = 0L;
pQueryStatistics.scanBytes = 0L;
pQueryStatistics.cpuCostNs = 0L;
pQueryStatistics.memCostBytes = 0L;
result.status = pStatus;
result.packetSeq = 0L;
result.queryStatistics = pQueryStatistics;
result.eos = true;
return result;
});
}
@Override
public Future<PTriggerProfileReportResult> triggerProfileReport(
PTriggerProfileReportRequest request, RpcCallback<PTriggerProfileReportResult> callback) {
return null;
}
@Override
public Future<PProxyResult> getInfo(PProxyRequest request, RpcCallback<PProxyResult> callback) {
return null;
}
} | class PseudoPBackendService implements PBackendServiceAsync {
private final ExecutorService executor = Executors.newSingleThreadExecutor();
PseudoPBackendService() {
executor.submit(() -> {
currentBackend.set(PseudoBackend.this);
});
}
@Override
public PExecPlanFragmentResult execPlanFragment(PExecPlanFragmentRequest request) {
PExecPlanFragmentResult result = new PExecPlanFragmentResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
result.status = pStatus;
return result;
}
@Override
public PExecBatchPlanFragmentsResult execBatchPlanFragments(PExecBatchPlanFragmentsRequest request) {
PExecBatchPlanFragmentsResult result = new PExecBatchPlanFragmentsResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
result.status = pStatus;
return result;
}
@Override
public PCancelPlanFragmentResult cancelPlanFragment(PCancelPlanFragmentRequest request) {
PCancelPlanFragmentResult result = new PCancelPlanFragmentResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
result.status = pStatus;
return result;
}
@Override
public PFetchDataResult fetchData(PFetchDataRequest request) {
PFetchDataResult result = new PFetchDataResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
PQueryStatistics pQueryStatistics = new PQueryStatistics();
pQueryStatistics.scanRows = 0L;
pQueryStatistics.scanBytes = 0L;
pQueryStatistics.cpuCostNs = 0L;
pQueryStatistics.memCostBytes = 0L;
result.status = pStatus;
result.packetSeq = 0L;
result.queryStatistics = pQueryStatistics;
result.eos = true;
return result;
}
@Override
public PTriggerProfileReportResult triggerProfileReport(PTriggerProfileReportRequest request) {
return null;
}
@Override
public PProxyResult getInfo(PProxyRequest request) {
return null;
}
@Override
public Future<PExecPlanFragmentResult> execPlanFragment(
PExecPlanFragmentRequest request, RpcCallback<PExecPlanFragmentResult> callback) {
TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
final TExecPlanFragmentParams params = new TExecPlanFragmentParams();
PExecPlanFragmentResult result = new PExecPlanFragmentResult();
result.status = new StatusPB();
result.status.statusCode = 0;
try {
RpcContext rpcContext = RpcContext.getContext();
ByteBuf buf = rpcContext.getRequestBinaryAttachment();
byte[] serialRequest = new byte[buf.readableBytes()];
buf.readBytes(serialRequest);
deserializer.deserialize(params, serialRequest);
} catch (TException e) {
LOG.warn("error deserialize request", e);
result.status.statusCode = TStatusCode.INTERNAL_ERROR.getValue();
result.status.errorMsgs = Lists.newArrayList(e.getMessage());
return CompletableFuture.completedFuture(result);
}
executor.submit(() -> {
try {
execPlanFragmentWithReport(params);
} catch (Exception e) {
LOG.warn("error execPlanFragment", e);
}
});
return CompletableFuture.completedFuture(result);
}
@Override
public Future<PExecBatchPlanFragmentsResult> execBatchPlanFragments(
PExecBatchPlanFragmentsRequest request, RpcCallback<PExecBatchPlanFragmentsResult> callback) {
TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
final TExecBatchPlanFragmentsParams params = new TExecBatchPlanFragmentsParams();
PExecBatchPlanFragmentsResult result = new PExecBatchPlanFragmentsResult();
result.status = new StatusPB();
result.status.statusCode = 0;
try {
RpcContext rpcContext = RpcContext.getContext();
ByteBuf buf = rpcContext.getRequestBinaryAttachment();
byte[] serialRequest = new byte[buf.readableBytes()];
buf.readBytes(serialRequest);
deserializer.deserialize(params, serialRequest);
} catch (TException e) {
LOG.warn("error deserialize request", e);
result.status.statusCode = TStatusCode.INTERNAL_ERROR.getValue();
result.status.errorMsgs = Lists.newArrayList(e.getMessage());
return CompletableFuture.completedFuture(result);
}
executor.submit(() -> {
try {
execBatchPlanFragmentsWithReport(params);
} catch (Exception e) {
LOG.warn("error execBatchPlanFragments", e);
}
});
return CompletableFuture.completedFuture(result);
}
@Override
public Future<PCancelPlanFragmentResult> cancelPlanFragment(
PCancelPlanFragmentRequest request, RpcCallback<PCancelPlanFragmentResult> callback) {
return executor.submit(() -> {
PCancelPlanFragmentResult result = new PCancelPlanFragmentResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
result.status = pStatus;
return result;
});
}
@Override
public Future<PFetchDataResult> fetchData(PFetchDataRequest request, RpcCallback<PFetchDataResult> callback) {
return executor.submit(() -> {
PFetchDataResult result = new PFetchDataResult();
StatusPB pStatus = new StatusPB();
pStatus.statusCode = 0;
PQueryStatistics pQueryStatistics = new PQueryStatistics();
pQueryStatistics.scanRows = 0L;
pQueryStatistics.scanBytes = 0L;
pQueryStatistics.cpuCostNs = 0L;
pQueryStatistics.memCostBytes = 0L;
result.status = pStatus;
result.packetSeq = 0L;
result.queryStatistics = pQueryStatistics;
result.eos = true;
return result;
});
}
@Override
public Future<PTriggerProfileReportResult> triggerProfileReport(
PTriggerProfileReportRequest request, RpcCallback<PTriggerProfileReportResult> callback) {
return null;
}
@Override
public Future<PProxyResult> getInfo(PProxyRequest request, RpcCallback<PProxyResult> callback) {
return null;
}
} |
I don't get this. Please add cases. | public RowData readRecord(RowData reuse, byte[] bytes, int offset, int numBytes) throws IOException {
GenericRowData returnRecord = null;
do {
if (this.getDelimiter() != null && this.getDelimiter().length == 1
&& this.getDelimiter()[0] == NEW_LINE && offset + numBytes >= 1
&& bytes[offset + numBytes - 1] == CARRIAGE_RETURN){
numBytes -= 1;
}
byte[] trimBytes = Arrays.copyOfRange(bytes, offset, offset + numBytes);
GenericRowData jsonRow = (GenericRowData) deserializationSchema.deserialize(trimBytes);
if (jsonRow != null) {
returnRecord = rowData;
for (int i = 0; i < jsonSelectFieldToJsonFieldMapping.length; i++) {
returnRecord.setField(jsonSelectFieldToProjectFieldMapping[i],
jsonRow.getField(jsonSelectFieldToJsonFieldMapping[i]));
}
}
} while (returnRecord == null && !reachedEnd());
emitted++;
return returnRecord;
} | } while (returnRecord == null && !reachedEnd()); | public RowData readRecord(RowData reuse, byte[] bytes, int offset, int numBytes) throws IOException {
if (this.getDelimiter() != null && this.getDelimiter().length == 1
&& this.getDelimiter()[0] == NEW_LINE && offset + numBytes >= 1
&& bytes[offset + numBytes - 1] == CARRIAGE_RETURN) {
numBytes -= 1;
}
byte[] trimBytes = Arrays.copyOfRange(bytes, offset, offset + numBytes);
GenericRowData jsonRow = (GenericRowData) deserializationSchema.deserialize(trimBytes);
if (jsonRow == null) {
return null;
}
GenericRowData returnRecord = rowData;
for (int i = 0; i < jsonSelectFieldToJsonFieldMapping.length; i++) {
returnRecord.setField(jsonSelectFieldToProjectFieldMapping[i],
jsonRow.getField(jsonSelectFieldToJsonFieldMapping[i]));
}
emitted++;
return returnRecord;
} | class JsonInputFormat extends DelimitedInputFormat<RowData> {
/**
* Code of \r, used to remove \r from a line when the line ends with \r\n.
*/
private static final byte CARRIAGE_RETURN = (byte) '\r';
/**
* Code of \n, used to identify if \n is used as delimiter.
*/
private static final byte NEW_LINE = (byte) '\n';
private final DataType[] fieldTypes;
private final String[] fieldNames;
private final int[] selectFields;
private final List<String> partitionKeys;
private final String defaultPartValue;
private final long limit;
private final int[] jsonSelectFieldToProjectFieldMapping;
private final int[] jsonSelectFieldToJsonFieldMapping;
private final JsonRowDataDeserializationSchema deserializationSchema;
private transient boolean end;
private transient long emitted;
private transient GenericRowData rowData;
public JsonInputFormat(
Path[] filePaths,
DataType[] fieldTypes,
String[] fieldNames,
int[] selectFields,
List<String> partitionKeys,
String defaultPartValue,
long limit,
int[] jsonSelectFieldToProjectFieldMapping,
int[] jsonSelectFieldToJsonFieldMapping,
JsonRowDataDeserializationSchema deserializationSchema) {
super.setFilePaths(filePaths);
this.fieldTypes = fieldTypes;
this.fieldNames = fieldNames;
this.selectFields = selectFields;
this.partitionKeys = partitionKeys;
this.defaultPartValue = defaultPartValue;
this.limit = limit;
this.jsonSelectFieldToProjectFieldMapping = jsonSelectFieldToProjectFieldMapping;
this.jsonSelectFieldToJsonFieldMapping = jsonSelectFieldToJsonFieldMapping;
this.deserializationSchema = deserializationSchema;
}
@Override
public boolean supportsMultiPaths() {
return true;
}
@Override
public void open(FileInputSplit split) throws IOException {
super.open(split);
this.end = false;
this.emitted = 0L;
this.rowData = PartitionPathUtils.fillPartitionValueForRecord(fieldNames, fieldTypes, selectFields,
partitionKeys, currentSplit.getPath(), defaultPartValue);
}
@Override
public boolean reachedEnd() {
return emitted >= limit || end;
}
@Override
} | class JsonInputFormat extends DelimitedInputFormat<RowData> {
/**
* Code of \r, used to remove \r from a line when the line ends with \r\n.
*/
private static final byte CARRIAGE_RETURN = (byte) '\r';
/**
* Code of \n, used to identify if \n is used as delimiter.
*/
private static final byte NEW_LINE = (byte) '\n';
private final DataType[] fieldTypes;
private final String[] fieldNames;
private final int[] selectFields;
private final List<String> partitionKeys;
private final String defaultPartValue;
private final long limit;
private final int[] jsonSelectFieldToProjectFieldMapping;
private final int[] jsonSelectFieldToJsonFieldMapping;
private final JsonRowDataDeserializationSchema deserializationSchema;
private transient boolean end;
private transient long emitted;
private transient GenericRowData rowData;
public JsonInputFormat(
Path[] filePaths,
DataType[] fieldTypes,
String[] fieldNames,
int[] selectFields,
List<String> partitionKeys,
String defaultPartValue,
long limit,
int[] jsonSelectFieldToProjectFieldMapping,
int[] jsonSelectFieldToJsonFieldMapping,
JsonRowDataDeserializationSchema deserializationSchema) {
super.setFilePaths(filePaths);
this.fieldTypes = fieldTypes;
this.fieldNames = fieldNames;
this.selectFields = selectFields;
this.partitionKeys = partitionKeys;
this.defaultPartValue = defaultPartValue;
this.limit = limit;
this.jsonSelectFieldToProjectFieldMapping = jsonSelectFieldToProjectFieldMapping;
this.jsonSelectFieldToJsonFieldMapping = jsonSelectFieldToJsonFieldMapping;
this.deserializationSchema = deserializationSchema;
}
@Override
public boolean supportsMultiPaths() {
return true;
}
@Override
public void open(FileInputSplit split) throws IOException {
super.open(split);
this.end = false;
this.emitted = 0L;
this.rowData = PartitionPathUtils.fillPartitionValueForRecord(fieldNames, fieldTypes, selectFields,
partitionKeys, currentSplit.getPath(), defaultPartValue);
}
@Override
public boolean reachedEnd() {
return emitted >= limit || end;
}
@Override
@Override
public RowData nextRecord(RowData record) throws IOException {
while (true) {
if (readLine()) {
RowData row = readRecord(record, this.currBuffer, this.currOffset, this.currLen);
if (row == null) {
continue;
} else {
return row;
}
} else {
this.end = true;
return null;
}
}
}
} |
sure, I agree that we can do the overall test cleanup as a separate issue if it is too much for this PR. | public void testFailingAllocationFailsRemappedPendingSlotRequests() throws Exception {
final List<AllocationID> allocations = new ArrayList<>();
resourceManagerGateway.setRequestSlotConsumer(slotRequest -> allocations.add(slotRequest.getAllocationId()));
try (SlotPoolImpl slotPool = setUpSlotPool()) {
final CompletableFuture<PhysicalSlot> slotFuture1 = requestNewAllocatedSlot(slotPool, new SlotRequestId());
final CompletableFuture<PhysicalSlot> slotFuture2 = requestNewAllocatedSlot(slotPool, new SlotRequestId());
final AllocationID allocationId1 = allocations.get(0);
final AllocationID allocationId2 = allocations.get(1);
final TaskManagerLocation location = new LocalTaskManagerLocation();
final SlotOffer slotOffer = new SlotOffer(allocationId2, 0, ResourceProfile.ANY);
slotPool.registerTaskManager(location.getResourceID());
slotPool.offerSlot(location, new SimpleAckingTaskManagerGateway(), slotOffer);
assertThat(slotFuture1.isDone(), is(true));
assertThat(slotFuture2.isDone(), is(false));
final FlinkException cause = new FlinkException("Fail pending slot request failure.");
final Optional<ResourceID> responseFuture = slotPool.failAllocation(allocationId1, cause);
assertThat(responseFuture.isPresent(), is(false));
try {
slotFuture2.getNow(null);
fail("Expected a slot allocation failure.");
} catch (Throwable t) {
assertThat(ExceptionUtils.stripCompletionException(t), equalTo(cause));
}
}
} | try (SlotPoolImpl slotPool = setUpSlotPool()) { | public void testFailingAllocationFailsRemappedPendingSlotRequests() throws Exception {
final List<AllocationID> allocations = new ArrayList<>();
resourceManagerGateway.setRequestSlotConsumer(slotRequest -> allocations.add(slotRequest.getAllocationId()));
try (SlotPoolImpl slotPool = setUpSlotPool()) {
final CompletableFuture<PhysicalSlot> slotFuture1 = requestNewAllocatedSlot(slotPool, new SlotRequestId());
final CompletableFuture<PhysicalSlot> slotFuture2 = requestNewAllocatedSlot(slotPool, new SlotRequestId());
final AllocationID allocationId1 = allocations.get(0);
final AllocationID allocationId2 = allocations.get(1);
final TaskManagerLocation location = new LocalTaskManagerLocation();
final SlotOffer slotOffer = new SlotOffer(allocationId2, 0, ResourceProfile.ANY);
slotPool.registerTaskManager(location.getResourceID());
slotPool.offerSlot(location, new SimpleAckingTaskManagerGateway(), slotOffer);
assertThat(slotFuture1.isDone(), is(true));
assertThat(slotFuture2.isDone(), is(false));
final FlinkException cause = new FlinkException("Fail pending slot request failure.");
final Optional<ResourceID> responseFuture = slotPool.failAllocation(allocationId1, cause);
assertThat(responseFuture.isPresent(), is(false));
try {
slotFuture2.getNow(null);
fail("Expected a slot allocation failure.");
} catch (Throwable t) {
assertThat(ExceptionUtils.stripCompletionException(t), equalTo(cause));
}
}
} | class SlotPoolPendingRequestFailureTest extends TestLogger {
private static final JobID jobId = new JobID();
private static final ComponentMainThreadExecutor mainThreadExecutor = ComponentMainThreadExecutorServiceAdapter.forMainThread();
public static final Time TIMEOUT = Time.seconds(10L);
private TestingResourceManagerGateway resourceManagerGateway;
@Before
public void setup() {
resourceManagerGateway = new TestingResourceManagerGateway();
}
/**
* Tests that failing an allocation fails the pending slot request.
*/
@Test
public void testFailingAllocationFailsPendingSlotRequests() throws Exception {
final CompletableFuture<AllocationID> allocationIdFuture = new CompletableFuture<>();
resourceManagerGateway.setRequestSlotConsumer(slotRequest -> allocationIdFuture.complete(slotRequest.getAllocationId()));
try (SlotPoolImpl slotPool = setUpSlotPool()) {
final CompletableFuture<PhysicalSlot> slotFuture = requestNewAllocatedSlot(slotPool, new SlotRequestId());
final AllocationID allocationId = allocationIdFuture.get();
assertThat(slotFuture.isDone(), is(false));
final FlinkException cause = new FlinkException("Fail pending slot request failure.");
final Optional<ResourceID> responseFuture = slotPool.failAllocation(allocationId, cause);
assertThat(responseFuture.isPresent(), is(false));
try {
slotFuture.get();
fail("Expected a slot allocation failure.");
} catch (ExecutionException ee) {
assertThat(ExceptionUtils.stripExecutionException(ee), equalTo(cause));
}
}
}
@Test
/**
* Tests that a failing resource manager request fails a pending slot request and cancels the slot
* request at the RM (e.g. due to a TimeoutException).
*
* <p>See FLINK-7870
*/
@Test
public void testFailingResourceManagerRequestFailsPendingSlotRequestAndCancelsRMRequest() throws Exception {
try (SlotPoolImpl slotPool = setUpSlotPool()) {
final CompletableFuture<Acknowledge> requestSlotFuture = new CompletableFuture<>();
final CompletableFuture<AllocationID> cancelSlotFuture = new CompletableFuture<>();
final CompletableFuture<AllocationID> requestSlotFutureAllocationId = new CompletableFuture<>();
resourceManagerGateway.setRequestSlotFuture(requestSlotFuture);
resourceManagerGateway.setRequestSlotConsumer(slotRequest -> requestSlotFutureAllocationId.complete(slotRequest.getAllocationId()));
resourceManagerGateway.setCancelSlotConsumer(cancelSlotFuture::complete);
final CompletableFuture<PhysicalSlot> slotFuture = requestNewAllocatedSlot(slotPool, new SlotRequestId());
requestSlotFuture.completeExceptionally(new FlinkException("Testing exception."));
try {
slotFuture.get();
fail("The slot future should not have been completed properly.");
} catch (Exception ignored) {
}
assertEquals(requestSlotFutureAllocationId.get(), cancelSlotFuture.get());
}
}
/**
* Tests that a pending slot request is failed with a timeout.
*/
@Test
public void testPendingSlotRequestTimeout() throws Exception {
final ScheduledExecutorService singleThreadExecutor = Executors.newSingleThreadScheduledExecutor();
final ComponentMainThreadExecutor componentMainThreadExecutor = ComponentMainThreadExecutorServiceAdapter.forSingleThreadExecutor(singleThreadExecutor);
final SlotPoolImpl slotPool = setUpSlotPool(componentMainThreadExecutor);
try {
final Time timeout = Time.milliseconds(5L);
final CompletableFuture<PhysicalSlot> slotFuture = CompletableFuture
.supplyAsync(() -> requestNewAllocatedSlot(slotPool, new SlotRequestId(), timeout), componentMainThreadExecutor)
.thenCompose(Function.identity());
try {
slotFuture.get();
fail("Expected that the future completes with a TimeoutException.");
} catch (ExecutionException ee) {
assertThat(ExceptionUtils.stripExecutionException(ee), instanceOf(TimeoutException.class));
}
} finally {
CompletableFuture.runAsync(ThrowingRunnable.unchecked(slotPool::close), componentMainThreadExecutor).get();
singleThreadExecutor.shutdownNow();
}
}
private CompletableFuture<PhysicalSlot> requestNewAllocatedSlot(SlotPoolImpl slotPool, SlotRequestId slotRequestId) {
return requestNewAllocatedSlot(slotPool, slotRequestId, TIMEOUT);
}
private CompletableFuture<PhysicalSlot> requestNewAllocatedSlot(SlotPoolImpl slotPool, SlotRequestId slotRequestId, Time timeout) {
return slotPool.requestNewAllocatedSlot(slotRequestId, ResourceProfile.UNKNOWN, timeout);
}
private SlotPoolImpl setUpSlotPool() throws Exception {
return setUpSlotPool(mainThreadExecutor);
}
private SlotPoolImpl setUpSlotPool(ComponentMainThreadExecutor componentMainThreadExecutor) throws Exception {
final SlotPoolImpl slotPool = new TestingSlotPoolImpl(jobId);
slotPool.start(JobMasterId.generate(), "foobar", componentMainThreadExecutor);
slotPool.connectToResourceManager(resourceManagerGateway);
return slotPool;
}
} | class SlotPoolPendingRequestFailureTest extends TestLogger {
private static final JobID jobId = new JobID();
private static final ComponentMainThreadExecutor mainThreadExecutor = ComponentMainThreadExecutorServiceAdapter.forMainThread();
public static final Time TIMEOUT = Time.seconds(10L);
private TestingResourceManagerGateway resourceManagerGateway;
@Before
public void setup() {
resourceManagerGateway = new TestingResourceManagerGateway();
}
/**
* Tests that failing an allocation fails the pending slot request.
*/
@Test
public void testFailingAllocationFailsPendingSlotRequests() throws Exception {
final CompletableFuture<AllocationID> allocationIdFuture = new CompletableFuture<>();
resourceManagerGateway.setRequestSlotConsumer(slotRequest -> allocationIdFuture.complete(slotRequest.getAllocationId()));
try (SlotPoolImpl slotPool = setUpSlotPool()) {
final CompletableFuture<PhysicalSlot> slotFuture = requestNewAllocatedSlot(slotPool, new SlotRequestId());
final AllocationID allocationId = allocationIdFuture.get();
assertThat(slotFuture.isDone(), is(false));
final FlinkException cause = new FlinkException("Fail pending slot request failure.");
final Optional<ResourceID> responseFuture = slotPool.failAllocation(allocationId, cause);
assertThat(responseFuture.isPresent(), is(false));
try {
slotFuture.get();
fail("Expected a slot allocation failure.");
} catch (ExecutionException ee) {
assertThat(ExceptionUtils.stripExecutionException(ee), equalTo(cause));
}
}
}
@Test
/**
* Tests that a failing resource manager request fails a pending slot request and cancels the slot
* request at the RM (e.g. due to a TimeoutException).
*
* <p>See FLINK-7870
*/
@Test
public void testFailingResourceManagerRequestFailsPendingSlotRequestAndCancelsRMRequest() throws Exception {
try (SlotPoolImpl slotPool = setUpSlotPool()) {
final CompletableFuture<Acknowledge> requestSlotFuture = new CompletableFuture<>();
final CompletableFuture<AllocationID> cancelSlotFuture = new CompletableFuture<>();
final CompletableFuture<AllocationID> requestSlotFutureAllocationId = new CompletableFuture<>();
resourceManagerGateway.setRequestSlotFuture(requestSlotFuture);
resourceManagerGateway.setRequestSlotConsumer(slotRequest -> requestSlotFutureAllocationId.complete(slotRequest.getAllocationId()));
resourceManagerGateway.setCancelSlotConsumer(cancelSlotFuture::complete);
final CompletableFuture<PhysicalSlot> slotFuture = requestNewAllocatedSlot(slotPool, new SlotRequestId());
requestSlotFuture.completeExceptionally(new FlinkException("Testing exception."));
try {
slotFuture.get();
fail("The slot future should not have been completed properly.");
} catch (Exception ignored) {
}
assertEquals(requestSlotFutureAllocationId.get(), cancelSlotFuture.get());
}
}
/**
* Tests that a pending slot request is failed with a timeout.
*/
@Test
public void testPendingSlotRequestTimeout() throws Exception {
final ScheduledExecutorService singleThreadExecutor = Executors.newSingleThreadScheduledExecutor();
final ComponentMainThreadExecutor componentMainThreadExecutor = ComponentMainThreadExecutorServiceAdapter.forSingleThreadExecutor(singleThreadExecutor);
final SlotPoolImpl slotPool = setUpSlotPool(componentMainThreadExecutor);
try {
final Time timeout = Time.milliseconds(5L);
final CompletableFuture<PhysicalSlot> slotFuture = CompletableFuture
.supplyAsync(() -> requestNewAllocatedSlot(slotPool, new SlotRequestId(), timeout), componentMainThreadExecutor)
.thenCompose(Function.identity());
try {
slotFuture.get();
fail("Expected that the future completes with a TimeoutException.");
} catch (ExecutionException ee) {
assertThat(ExceptionUtils.stripExecutionException(ee), instanceOf(TimeoutException.class));
}
} finally {
CompletableFuture.runAsync(ThrowingRunnable.unchecked(slotPool::close), componentMainThreadExecutor).get();
singleThreadExecutor.shutdownNow();
}
}
private CompletableFuture<PhysicalSlot> requestNewAllocatedSlot(SlotPoolImpl slotPool, SlotRequestId slotRequestId) {
return requestNewAllocatedSlot(slotPool, slotRequestId, TIMEOUT);
}
private CompletableFuture<PhysicalSlot> requestNewAllocatedSlot(SlotPoolImpl slotPool, SlotRequestId slotRequestId, Time timeout) {
return slotPool.requestNewAllocatedSlot(slotRequestId, ResourceProfile.UNKNOWN, timeout);
}
private SlotPoolImpl setUpSlotPool() throws Exception {
return setUpSlotPool(mainThreadExecutor);
}
private SlotPoolImpl setUpSlotPool(ComponentMainThreadExecutor componentMainThreadExecutor) throws Exception {
final SlotPoolImpl slotPool = new TestingSlotPoolImpl(jobId);
slotPool.start(JobMasterId.generate(), "foobar", componentMainThreadExecutor);
slotPool.connectToResourceManager(resourceManagerGateway);
return slotPool;
}
} |
We need to remove connectionId is unused. | public void unregisterAndAwaitTermination(final int connectionId) {
executorServices.computeIfPresent(connectionId, (unused, executorService) -> {
executorService.shutdown();
try {
executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
} catch (final InterruptedException ignored) {
Thread.currentThread().interrupt();
}
return null;
});
} | }); | public void unregisterAndAwaitTermination(final int connectionId) {
ExecutorService executorService = executorServices.remove(connectionId);
executorService.shutdown();
try {
executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
} catch (final InterruptedException ignored) {
Thread.currentThread().interrupt();
}
} | class ConnectionThreadExecutorGroup {
private static final ConnectionThreadExecutorGroup INSTANCE = new ConnectionThreadExecutorGroup();
private final Map<Integer, ExecutorService> executorServices = new ConcurrentHashMap<>();
    /**
     * Get connection thread executor group.
     *
     * @return connection thread executor group
     */
    public static ConnectionThreadExecutorGroup getInstance() {
        return INSTANCE;
    }
/**
* Register connection.
*
* @param connectionId connection id
*/
public void register(final int connectionId) {
executorServices.put(connectionId, Executors.newSingleThreadExecutor());
}
    /**
     * Get executor service of connection.
     *
     * @param connectionId connection id
     * @return executor service of current connection, or {@code null} if the
     *         connection was never registered or has been unregistered
     */
    public ExecutorService get(final int connectionId) {
        return executorServices.get(connectionId);
    }
/**
* Unregister connection.
*
* @param connectionId connection id
*/
} | class ConnectionThreadExecutorGroup {
private static final ConnectionThreadExecutorGroup INSTANCE = new ConnectionThreadExecutorGroup();
private final Map<Integer, ExecutorService> executorServices = new ConcurrentHashMap<>();
/**
* Get connection thread executor group.
*
* @return connection thread executor group
*/
public static ConnectionThreadExecutorGroup getInstance() {
return INSTANCE;
}
/**
* Register connection.
*
* @param connectionId connection id
*/
public void register(final int connectionId) {
executorServices.put(connectionId, Executors.newSingleThreadExecutor());
}
/**
* Get executor service of connection.
*
* @param connectionId connection id
* @return executor service of current connection
*/
public ExecutorService get(final int connectionId) {
return executorServices.get(connectionId);
}
/**
* Unregister connection and await termination.
*
* @param connectionId connection id
*/
} |
The `StateNamespace` is supposed to do `equals` and `hashCode` right so that you only need to obtain the `stringKey()` for serialization purposes. So here I think you should just compare them directly. | public void deleteTimer(Slice keyBytes, StateNamespace namespace, String timerId) {
Set<Slice> timersForKey = activeTimers.get(keyBytes);
if (timersForKey == null) {
return;
}
Iterator<Slice> timerIt = timersForKey.iterator();
while (timerIt.hasNext()) {
try {
TimerData timerData = CoderUtils.decodeFromByteArray(timerDataCoder,
timerIt.next().buffer);
ComparisonChain chain =
ComparisonChain.start().compare(timerData.getTimerId(), timerId);
if (chain.result() == 0 && !timerData.getNamespace().equals(namespace)) {
chain = chain.compare(timerData.getNamespace().stringKey(), namespace.stringKey());
}
if (chain.result() == 0) {
timerIt.remove();
}
} catch (CoderException e) {
throw new RuntimeException(e);
}
}
if (timersForKey.isEmpty()) {
activeTimers.remove(keyBytes);
}
} | public void deleteTimer(Slice keyBytes, StateNamespace namespace, String timerId) {
Set<Slice> timersForKey = activeTimers.get(keyBytes);
if (timersForKey == null) {
return;
}
Iterator<Slice> timerIt = timersForKey.iterator();
while (timerIt.hasNext()) {
try {
TimerData timerData = CoderUtils.decodeFromByteArray(timerDataCoder,
timerIt.next().buffer);
ComparisonChain chain =
ComparisonChain.start().compare(timerData.getTimerId(), timerId);
if (chain.result() == 0 && !timerData.getNamespace().equals(namespace)) {
chain = chain.compare(timerData.getNamespace().stringKey(), namespace.stringKey());
}
if (chain.result() == 0) {
timerIt.remove();
}
} catch (CoderException e) {
throw new RuntimeException(e);
}
}
if (timersForKey.isEmpty()) {
activeTimers.remove(keyBytes);
}
} | class TimerSet implements Serializable {
private final Map<Slice, Set<Slice>> activeTimers = new HashMap<>();
private final TimerDataCoder timerDataCoder;
protected TimerSet(TimerDataCoder timerDataCoder) {
this.timerDataCoder = timerDataCoder;
}
/**
 * Adds {@code timer} to the set of active timers for {@code keyBytes}.
 * Timers are stored in their encoded form.
 *
 * @throws RuntimeException if the timer cannot be encoded
 */
public void addTimer(Slice keyBytes, TimerData timer) {
    try {
        // Encode first so a CoderException never leaves a half-initialized entry behind,
        // then register/extend the per-key set in a single computeIfAbsent step.
        Slice timerBytes = new Slice(CoderUtils.encodeToByteArray(timerDataCoder, timer));
        activeTimers.computeIfAbsent(keyBytes, k -> new HashSet<>()).add(timerBytes);
    } catch (CoderException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Deletes the given timer for {@code keyBytes}, if present, and drops the per-key
 * entry once its last timer is gone.
 *
 * @throws RuntimeException if the timer cannot be encoded
 */
public void deleteTimer(Slice keyBytes, TimerData timerKey) {
    Set<Slice> timersForKey = activeTimers.get(keyBytes);
    if (timersForKey != null) {
        try {
            Slice timerBytes = new Slice(CoderUtils.encodeToByteArray(timerDataCoder, timerKey));
            // The previous add-then-remove pair was a no-op; removing directly is enough.
            timersForKey.remove(timerBytes);
        } catch (CoderException e) {
            throw new RuntimeException(e);
        }
        if (timersForKey.isEmpty()) {
            activeTimers.remove(keyBytes);
        }
        // No re-put needed: timersForKey is the same instance already stored in the map.
    }
}
// Exposes the raw key -> encoded-timers map so tests can assert on its contents.
@VisibleForTesting
protected Map<Slice, Set<Slice>> getMap() {
return activeTimers;
}
} | class TimerSet implements Serializable {
private final Map<Slice, Set<Slice>> activeTimers = new HashMap<>();
private final TimerDataCoder timerDataCoder;
protected TimerSet(TimerDataCoder timerDataCoder) {
this.timerDataCoder = timerDataCoder;
}
public void addTimer(Slice keyBytes, TimerData timer) {
Set<Slice> timersForKey = activeTimers.get(keyBytes);
if (timersForKey == null) {
timersForKey = new HashSet<>();
}
try {
Slice timerBytes = new Slice(CoderUtils.encodeToByteArray(timerDataCoder, timer));
timersForKey.add(timerBytes);
} catch (CoderException e) {
throw new RuntimeException(e);
}
activeTimers.put(keyBytes, timersForKey);
}
public void deleteTimer(Slice keyBytes, TimerData timerKey) {
Set<Slice> timersForKey = activeTimers.get(keyBytes);
if (timersForKey != null) {
try {
Slice timerBytes = new Slice(CoderUtils.encodeToByteArray(timerDataCoder, timerKey));
timersForKey.add(timerBytes);
timersForKey.remove(timerBytes);
} catch (CoderException e) {
throw new RuntimeException(e);
}
if (timersForKey.isEmpty()) {
activeTimers.remove(keyBytes);
} else {
activeTimers.put(keyBytes, timersForKey);
}
}
}
@VisibleForTesting
protected Map<Slice, Set<Slice>> getMap() {
return activeTimers;
}
} |
|
Why do we give it as `pathVerification.tests`? Looks like the module structure is wrong. The path to test files seems to be `outside-tests/src/pathVerification.tests` but the correct path should be `outside-tests/src/pathVerification/tests`. | public void verifyTestsOutsidePath() throws BallerinaTestException {
LogLeecher passingLeecher = new LogLeecher("1 passing");
LogLeecher failingLeecher = new LogLeecher("0 failing");
// Pass the module name only: the test sources live under pathVerification/tests and
// are discovered automatically. "pathVerification.tests" pointed at a non-existent
// module path (pathVerification.tests is not a module).
balClient.runMain("test", new String[]{"pathVerification"}, null, new String[0],
    new LogLeecher[]{passingLeecher, failingLeecher}, projectPath);
// Block until both expected summary lines appear in the output (20s timeout each).
passingLeecher.waitForText(20000);
failingLeecher.waitForText(20000);
} | balClient.runMain("test", new String[]{"pathVerification.tests"}, null, new String[0], | public void verifyTestsOutsidePath() throws BallerinaTestException {
LogLeecher passingLeecher = new LogLeecher("1 passing");
LogLeecher failingLeecher = new LogLeecher("0 failing");
// Run "bal test" on the module; its test sources under pathVerification/tests are
// discovered automatically.
balClient.runMain("test", new String[]{"pathVerification"}, null, new String[0],
new LogLeecher[]{passingLeecher, failingLeecher}, projectPath);
// Block until both expected summary lines appear in the output (20s timeout each).
passingLeecher.waitForText(20000);
failingLeecher.waitForText(20000);
} | class PathVerificationTest extends BaseTestCase {
private BMainInstance balClient;
private String projectPath;
// One-time setup: create the CLI client against the shared server instance and
// resolve the path of the project under test.
@BeforeClass
public void setup() throws BallerinaTestException {
balClient = new BMainInstance(balServer);
projectPath = outsideTestsProjectPath.toString();
}
@Test
@Test
/**
 * Verifies that running "bal test" on a module without a tests directory reports
 * that no tests were found.
 */
public void verifyMissingTestsDirectory() throws BallerinaTestException {
    String msg = "No tests found";
    LogLeecher clientLeecher = new LogLeecher(msg);
    // Pass the module name only; "missingTestsDirectory.tests" is not a valid module
    // path — the absent tests directory is exactly what this case exercises.
    balClient.runMain("test", new String[]{"missingTestsDirectory"},
        null, new String[]{}, new LogLeecher[]{clientLeecher}, projectPath);
    clientLeecher.waitForText(20000);
}
} | class PathVerificationTest extends BaseTestCase {
private BMainInstance balClient;
private String projectPath;
@BeforeClass
public void setup() throws BallerinaTestException {
balClient = new BMainInstance(balServer);
projectPath = outsideTestsProjectPath.toString();
}
@Test
@Test
public void verifyMissingTestsDirectory() throws BallerinaTestException {
String msg = "No tests found";
LogLeecher clientLeecher = new LogLeecher(msg);
balClient.runMain("test", new String[]{"missingTestsDirectory"},
null, new String[]{}, new LogLeecher[]{clientLeecher}, projectPath);
clientLeecher.waitForText(20000);
}
} |
@Thul95 please create a new issue for that. Thanks! | private void propagateUserProperties() {
// Re-propagate -D user properties from the original Maven command line so the
// forked process sees the same system properties.
final String mavenCmdLine = BootstrapMavenOptions.getMavenCmdLine();
if (mavenCmdLine == null || mavenCmdLine.isEmpty()) {
return;
}
// Locate the first -D argument; nothing to propagate if there is none.
int i = mavenCmdLine.indexOf("-D");
if (i < 0) {
return;
}
final StringBuilder buf = new StringBuilder();
buf.append("-D");
i += 2;
// Copy characters of the current -D option until whitespace ends it, then jump
// to the next "-D" occurrence and repeat. buf always keeps the "-D" prefix
// (length 2), so "buf.length() > 2" means a non-empty option was accumulated.
// NOTE(review): values containing whitespace (e.g. -Dfoo="a b") are cut at the
// first space — confirm whether quoted values need to be supported.
while (i < mavenCmdLine.length()) {
final char ch = mavenCmdLine.charAt(i++);
if (!Character.isWhitespace(ch)) {
buf.append(ch);
} else if (buf.length() > 2) {
args.add(buf.toString());
buf.setLength(2);
i = mavenCmdLine.indexOf("-D", i);
if (i < 0) {
break;
}
i += 2;
}
}
// Flush the trailing option, if any (a bare "-D" is skipped).
if (buf.length() > 2) {
args.add(buf.toString());
}
} | if (i < 0) { | private void propagateUserProperties() {
final String mavenCmdLine = BootstrapMavenOptions.getMavenCmdLine();
if (mavenCmdLine == null || mavenCmdLine.isEmpty()) {
return;
}
int i = mavenCmdLine.indexOf("-D");
if (i < 0) {
return;
}
final StringBuilder buf = new StringBuilder();
buf.append("-D");
i += 2;
while (i < mavenCmdLine.length()) {
final char ch = mavenCmdLine.charAt(i++);
if (!Character.isWhitespace(ch)) {
buf.append(ch);
} else if (buf.length() > 2) {
args.add(buf.toString());
buf.setLength(2);
i = mavenCmdLine.indexOf("-D", i);
if (i < 0) {
break;
}
i += 2;
}
}
if (buf.length() > 2) {
args.add(buf.toString());
}
} | class loader
File wiringClassesDirectory = new File(buildDir, "wiring-devmode");
wiringClassesDirectory.mkdirs();
addToClassPaths(classPathManifest, devModeContext, wiringClassesDirectory);
for (Artifact appDep : project.getArtifacts()) {
addToClassPaths(classPathManifest, devModeContext, appDep.getFile());
} | class loader
File wiringClassesDirectory = new File(buildDir, "wiring-devmode");
wiringClassesDirectory.mkdirs();
addToClassPaths(classPathManifest, devModeContext, wiringClassesDirectory);
for (Artifact appDep : project.getArtifacts()) {
addToClassPaths(classPathManifest, devModeContext, appDep.getFile());
} |
Moved the logic to new class "TransformUpgrader" and added unit tests to "TransformUpgraderTest". | public void visitPrimitiveTransform(Node node) {
// Record this primitive transform under its enclosing composite so the parent's
// child list can be assembled when the composite is left.
children.put(node.getEnclosingNode(), node.toAppliedPTransform(pipeline));
try {
// Primitive transforms have no subtransforms, hence the empty child list.
// NOTE(review): node.toAppliedPTransform(pipeline) is called twice here —
// presumably it returns a cached or value-equal instance; confirm, or hoist
// the result into a local variable.
components.registerPTransform(
node.toAppliedPTransform(pipeline), Collections.emptyList());
} catch (IOException e) {
throw new IllegalStateException(e);
}
} | try { | public void visitPrimitiveTransform(Node node) {
children.put(node.getEnclosingNode(), node.toAppliedPTransform(pipeline));
try {
components.registerPTransform(
node.toAppliedPTransform(pipeline), Collections.emptyList());
} catch (IOException e) {
throw new IllegalStateException(e);
}
} | class PipelineTranslation {
public static RunnerApi.Pipeline toProto(Pipeline pipeline) {
return toProto(pipeline, SdkComponents.create(pipeline.getOptions()));
}
public static RunnerApi.Pipeline toProto(Pipeline pipeline, boolean useDeprecatedViewTransforms) {
return toProto(
pipeline, SdkComponents.create(pipeline.getOptions()), useDeprecatedViewTransforms);
}
public static RunnerApi.Pipeline toProto(Pipeline pipeline, SdkComponents components) {
return toProto(pipeline, components, false);
}
public static RunnerApi.Pipeline toProto(
final Pipeline pipeline,
final SdkComponents components,
boolean useDeprecatedViewTransforms) {
final List<String> rootIds = new ArrayList<>();
pipeline.traverseTopologically(
new PipelineVisitor.Defaults() {
private final ListMultimap<Node, AppliedPTransform<?, ?, ?>> children =
ArrayListMultimap.create();
@Override
public void leaveCompositeTransform(Node node) {
if (node.isRootNode()) {
for (AppliedPTransform<?, ?, ?> pipelineRoot : children.get(node)) {
rootIds.add(components.getExistingPTransformId(pipelineRoot));
}
} else {
children.put(node.getEnclosingNode(), node.toAppliedPTransform(pipeline));
try {
components.registerPTransform(
node.toAppliedPTransform(pipeline), children.get(node));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
@Override
});
RunnerApi.Pipeline res =
RunnerApi.Pipeline.newBuilder()
.setComponents(components.toComponents())
.addAllRequirements(components.requirements())
.addAllRootTransformIds(rootIds)
.build();
if (!useDeprecatedViewTransforms) {
res = elideDeprecatedViews(res);
}
ExternalTranslationOptions externalTranslationOptions =
pipeline.getOptions().as(ExternalTranslationOptions.class);
List<String> urnsToOverride = externalTranslationOptions.getTransformsToOverride();
if (urnsToOverride.size() > 0) {
Map<String, PTransform> transforms = res.getComponents().getTransformsMap();
List<String> alreadyCheckedURns = new ArrayList<>();
for (Entry<String, PTransform> entry : transforms.entrySet()) {
String urn = entry.getValue().getSpec().getUrn();
if (!alreadyCheckedURns.contains(urn) && urnsToOverride.contains(urn)) {
alreadyCheckedURns.add(urn);
List<
AppliedPTransform<
PInput,
POutput,
org.apache.beam.sdk.transforms.PTransform<? super PInput, POutput>>>
appliedPTransforms =
findAppliedPTransforms(
urn, pipeline, KnownTransformPayloadTranslator.KNOWN_PAYLOAD_TRANSLATORS);
for (AppliedPTransform<
PInput,
POutput,
org.apache.beam.sdk.transforms.PTransform<? super PInput, POutput>>
appliedPTransform : appliedPTransforms) {
TransformPayloadTranslator<
org.apache.beam.sdk.transforms.PTransform<? super PInput, POutput>>
payloadTranslator =
KnownTransformPayloadTranslator.KNOWN_PAYLOAD_TRANSLATORS.get(
appliedPTransform.getTransform().getClass());
try {
res =
updateTransformViaTransformService(
urn, appliedPTransform, payloadTranslator, pipeline, res);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
}
}
PipelineValidator.validate(res);
return res;
}
/**
 * Finds a free local port by binding an ephemeral server socket and returning its port.
 * The socket is released before returning, so another process could in principle grab
 * the port in the meantime; the short sleep gives the OS time to fully release it.
 *
 * @return an available local port number
 * @throws IOException if no socket could be opened
 */
private static int findAvailablePort() throws IOException {
    try (ServerSocket s = new ServerSocket(0)) {
        return s.getLocalPort();
    } finally {
        try {
            Thread.sleep(100);
        } catch (InterruptedException exn) {
            // Restore the interrupt flag instead of silently swallowing it.
            Thread.currentThread().interrupt();
        }
    }
}
private static <
InputT extends PInput,
OutputT extends POutput,
TransformT extends org.apache.beam.sdk.transforms.PTransform<InputT, OutputT>>
RunnerApi.Pipeline updateTransformViaTransformService(
String urn,
AppliedPTransform<
PInput,
POutput,
org.apache.beam.sdk.transforms.PTransform<? super PInput, POutput>>
appliedPTransform,
TransformPayloadTranslator<
org.apache.beam.sdk.transforms.PTransform<? super PInput, POutput>>
originalPayloadTranslator,
Pipeline pipeline,
RunnerApi.Pipeline runnerAPIpipeline)
throws IOException {
ExternalTranslationOptions externalTranslationOptions =
pipeline.getOptions().as(ExternalTranslationOptions.class);
Row configRow = originalPayloadTranslator.toConfigRow(appliedPTransform.getTransform());
ByteStringOutputStream outputStream = new ByteStringOutputStream();
try {
RowCoder.of(configRow.getSchema()).encode(configRow, outputStream);
} catch (IOException e) {
throw new RuntimeException(e);
}
ExternalTransforms.ExternalConfigurationPayload payload =
ExternalTransforms.ExternalConfigurationPayload.newBuilder()
.setSchema(SchemaTranslation.schemaToProto(configRow.getSchema(), true))
.setPayload(outputStream.toByteString())
.build();
String serviceAddress = null;
TransformServiceLauncher service = null;
try {
if (externalTranslationOptions.getTransformServiceAddress() != null) {
serviceAddress = externalTranslationOptions.getTransformServiceAddress();
} else if (externalTranslationOptions.getTransformServiceBeamVersion() != null) {
String projectName = UUID.randomUUID().toString();
service = TransformServiceLauncher.forProject(projectName, findAvailablePort());
service.setBeamVersion(externalTranslationOptions.getTransformServiceBeamVersion());
service.start();
service.waitTillUp(15000);
} else {
throw new IllegalArgumentException(
"Either option TransformServiceAddress or option TransformServiceBeamVersion should be provided to override a transform using the transform service");
}
if (serviceAddress == null) {
throw new IllegalArgumentException(
"Cannot override the transform "
+ urn
+ " since a valid transform service address could not be determined");
}
ExpandableTransform<InputT, OutputT> externalTransform =
(ExpandableTransform<InputT, OutputT>)
External.of(urn, payload.toByteArray(), serviceAddress);
PCollectionTuple input = PCollectionTuple.empty(pipeline);
for (TupleTag<?> tag : (Set<TupleTag<?>>) appliedPTransform.getInputs().keySet()) {
PCollection<?> pc = appliedPTransform.getInputs().get(tag);
if (pc == null) {
throw new IllegalArgumentException(
"Input of transform " + appliedPTransform + " with tag " + tag + " was null.");
}
input = input.and(tag, (PCollection) pc);
}
POutput output = externalTransform.expand((InputT) input);
Map<TupleTag<?>, PCollection<?>> originalOutputs = appliedPTransform.getOutputs();
Map<String, String> inputReplacements = new HashMap<>();
Map<TupleTag<?>, PCollection<?>> newOutputs = new HashMap<>();
if (output instanceof PCollectionTuple) {
newOutputs.putAll(((PCollectionTuple) output).getAll());
for (Map.Entry<TupleTag<?>, PCollection<?>> entry : newOutputs.entrySet()) {
if (entry == null) {
throw new IllegalArgumentException(
"Found unexpected null entry when iterating the outputs of expanded "
+ "ExpandableTransform "
+ externalTransform);
}
if (!appliedPTransform.getOutputs().containsKey(entry.getKey())) {
throw new RuntimeException(
"Could not find the tag " + entry.getKey() + " in the original set of outputs");
}
PCollection<?> originalOutputPc = originalOutputs.get(entry.getKey());
if (originalOutputPc == null) {
throw new IllegalArgumentException(
"Original output of transform "
+ appliedPTransform
+ " with tag "
+ entry.getKey()
+ " was null");
}
inputReplacements.put(originalOutputPc.getName(), entry.getValue().getName());
}
} else if (output instanceof PCollection) {
newOutputs.put(new TupleTag<>("temp_main_tag"), (PCollection) output);
inputReplacements.put(
originalOutputs.get(originalOutputs.keySet().iterator().next()).getName(),
((PCollection) output).getName());
} else {
throw new RuntimeException("Unexpected output type");
}
AppliedPTransform<?, ?, ?> updatedAppliedPTransform =
AppliedPTransform.of(
appliedPTransform.getFullName() + "_external",
appliedPTransform.getInputs(),
newOutputs,
externalTransform,
externalTransform.getResourceHints(),
appliedPTransform.getPipeline());
SdkComponents updatedComponents =
SdkComponents.create(
runnerAPIpipeline.getComponents(), runnerAPIpipeline.getRequirementsList());
String updatedTransformId =
updatedComponents.registerPTransform(updatedAppliedPTransform, Collections.emptyList());
RunnerApi.Components updatedRunnerApiComponents = updatedComponents.toComponents();
Map<String, Map<String, String>> transformInputUpdates = new HashMap<>();
List<String> oldTransformIds = new ArrayList<>();
updatedRunnerApiComponents
.getTransformsMap()
.forEach(
(transformId, transform) -> {
Map<String, String> updatedInputMap = new HashMap<>();
for (Map.Entry<String, String> entry : transform.getInputsMap().entrySet()) {
if (inputReplacements.containsKey(entry.getValue())) {
updatedInputMap.put(entry.getKey(), inputReplacements.get(entry.getValue()));
}
}
for (Map.Entry<String, String> entry : transform.getOutputsMap().entrySet()) {
if (inputReplacements.containsKey(entry.getValue())
&& urn.equals(transform.getSpec().getUrn())) {
oldTransformIds.add(transformId);
}
}
if (updatedInputMap.size() > 0) {
transformInputUpdates.put(transformId, updatedInputMap);
}
});
if (oldTransformIds.size() != 1) {
throw new IOException(
"Expected exactly one transform to be updated by "
+ oldTransformIds.size()
+ " were updated.");
}
String oldTransformId = oldTransformIds.get(0);
List<String> updaterRootTransformIds = new ArrayList<>();
updaterRootTransformIds.addAll(runnerAPIpipeline.getRootTransformIdsList());
if (updaterRootTransformIds.contains(oldTransformId)) {
updaterRootTransformIds.remove(oldTransformId);
updaterRootTransformIds.add(updatedTransformId);
}
Map<String, RunnerApi.PTransform> updatedTransforms = new HashMap<>();
updatedRunnerApiComponents
.getTransformsMap()
.forEach(
(transformId, transform) -> {
if (transformId.equals(oldTransformId)) {
return;
}
PTransform.Builder transformBuilder = transform.toBuilder();
if (transformInputUpdates.containsKey(transformId)) {
Map<String, String> inputUpdates = transformInputUpdates.get(transformId);
transformBuilder
.getInputsMap()
.forEach(
(key, value) -> {
if (inputUpdates.containsKey(key)) {
transformBuilder.putInputs(key, inputUpdates.get(key));
}
});
}
if (transform.getSubtransformsList().contains(oldTransformId)) {
List<String> updatedSubTransformsList = new ArrayList<>();
updatedSubTransformsList.addAll(transform.getSubtransformsList());
updatedSubTransformsList.remove(oldTransformId);
updatedSubTransformsList.add(updatedTransformId);
transformBuilder.clearSubtransforms();
transformBuilder.addAllSubtransforms(updatedSubTransformsList);
}
updatedTransforms.put(transformId, transformBuilder.build());
});
updatedRunnerApiComponents =
updatedRunnerApiComponents
.toBuilder()
.putAllTransforms(updatedTransforms)
.removeTransforms(oldTransformId)
.build();
RunnerApi.Pipeline updatedPipeline =
RunnerApi.Pipeline.newBuilder()
.setComponents(updatedRunnerApiComponents)
.addAllRequirements(updatedComponents.requirements())
.addAllRootTransformIds(updaterRootTransformIds)
.build();
return updatedPipeline;
} catch (TimeoutException e) {
throw new IOException(e);
} finally {
if (service != null) {
service.shutdown();
}
}
}
@SuppressWarnings({
"rawtypes"
})
private static <
InputT extends PInput,
OutputT extends POutput,
TransformT extends org.apache.beam.sdk.transforms.PTransform<? super InputT, OutputT>>
List<AppliedPTransform<InputT, OutputT, TransformT>> findAppliedPTransforms(
String urn,
Pipeline pipeline,
Map<
Class<? extends org.apache.beam.sdk.transforms.PTransform>,
TransformPayloadTranslator>
knownTranslators) {
List<AppliedPTransform<InputT, OutputT, TransformT>> appliedPTransforms = new ArrayList<>();
pipeline.traverseTopologically(
new PipelineVisitor.Defaults() {
void findMatchingAppliedPTransform(Node node) {
org.apache.beam.sdk.transforms.PTransform<?, ?> transform = node.getTransform();
if (transform == null) {
return;
}
if (knownTranslators.containsKey(transform.getClass())) {
TransformPayloadTranslator<TransformT> translator =
knownTranslators.get(transform.getClass());
if (translator.getUrn() != null && translator.getUrn().equals(urn)) {
appliedPTransforms.add(
(AppliedPTransform<InputT, OutputT, TransformT>)
node.toAppliedPTransform(pipeline));
}
}
}
@Override
public void leaveCompositeTransform(Node node) {
findMatchingAppliedPTransform(node);
}
@Override
public void visitPrimitiveTransform(Node node) {
findMatchingAppliedPTransform(node);
}
});
return appliedPTransforms;
}
private static RunnerApi.Pipeline elideDeprecatedViews(RunnerApi.Pipeline pipeline) {
Set<String> viewTransforms = new HashSet<>();
Map<String, String> viewOutputsToInputs = new HashMap<>();
pipeline
.getComponents()
.getTransformsMap()
.forEach(
(transformId, transform) -> {
if (transform
.getSpec()
.getUrn()
.equals(PTransformTranslation.CREATE_VIEW_TRANSFORM_URN)) {
viewTransforms.add(transformId);
viewOutputsToInputs.put(
Iterables.getOnlyElement(transform.getOutputsMap().values()),
Iterables.getOnlyElement(transform.getInputsMap().values()));
}
});
Map<String, RunnerApi.PTransform> newTransforms = new HashMap<>();
pipeline
.getComponents()
.getTransformsMap()
.forEach(
(transformId, transform) -> {
RunnerApi.PTransform.Builder transformBuilder = transform.toBuilder();
transform
.getInputsMap()
.forEach(
(key, value) -> {
if (viewOutputsToInputs.containsKey(value)) {
transformBuilder.putInputs(key, viewOutputsToInputs.get(value));
}
});
transform
.getOutputsMap()
.forEach(
(key, value) -> {
if (viewOutputsToInputs.containsKey(value)) {
transformBuilder.putOutputs(key, viewOutputsToInputs.get(value));
}
});
transformBuilder.clearSubtransforms();
transformBuilder.addAllSubtransforms(
transform.getSubtransformsList().stream()
.filter(id -> !viewTransforms.contains(id))
.collect(Collectors.toList()));
newTransforms.put(transformId, transformBuilder.build());
});
RunnerApi.Pipeline.Builder newPipeline = pipeline.toBuilder();
newPipeline.getComponentsBuilder().putAllTransforms(newTransforms);
viewTransforms.forEach(newPipeline.getComponentsBuilder()::removeTransforms);
viewOutputsToInputs.keySet().forEach(newPipeline.getComponentsBuilder()::removePcollections);
newPipeline.clearRootTransformIds();
newPipeline.addAllRootTransformIds(
pipeline.getRootTransformIdsList().stream()
.filter(id -> !viewTransforms.contains(id))
.collect(Collectors.toList()));
return newPipeline.build();
}
} | class PipelineTranslation {
public static RunnerApi.Pipeline toProto(Pipeline pipeline) {
return toProto(pipeline, SdkComponents.create(pipeline.getOptions()));
}
public static RunnerApi.Pipeline toProto(Pipeline pipeline, boolean useDeprecatedViewTransforms) {
return toProto(
pipeline, SdkComponents.create(pipeline.getOptions()), useDeprecatedViewTransforms);
}
public static RunnerApi.Pipeline toProto(Pipeline pipeline, SdkComponents components) {
return toProto(pipeline, components, false);
}
public static RunnerApi.Pipeline toProto(
final Pipeline pipeline,
final SdkComponents components,
boolean useDeprecatedViewTransforms) {
final List<String> rootIds = new ArrayList<>();
pipeline.traverseTopologically(
new PipelineVisitor.Defaults() {
private final ListMultimap<Node, AppliedPTransform<?, ?, ?>> children =
ArrayListMultimap.create();
@Override
public void leaveCompositeTransform(Node node) {
if (node.isRootNode()) {
for (AppliedPTransform<?, ?, ?> pipelineRoot : children.get(node)) {
rootIds.add(components.getExistingPTransformId(pipelineRoot));
}
} else {
children.put(node.getEnclosingNode(), node.toAppliedPTransform(pipeline));
try {
components.registerPTransform(
node.toAppliedPTransform(pipeline), children.get(node));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
@Override
});
RunnerApi.Pipeline res =
RunnerApi.Pipeline.newBuilder()
.setComponents(components.toComponents())
.addAllRequirements(components.requirements())
.addAllRootTransformIds(rootIds)
.build();
if (!useDeprecatedViewTransforms) {
res = elideDeprecatedViews(res);
}
ExternalTranslationOptions externalTranslationOptions =
pipeline.getOptions().as(ExternalTranslationOptions.class);
List<String> urnsToOverride = externalTranslationOptions.getTransformsToOverride();
if (urnsToOverride.size() > 0) {
try (TransformUpgrader upgrader = TransformUpgrader.of()) {
res =
upgrader.upgradeTransformsViaTransformService(
res, urnsToOverride, externalTranslationOptions);
} catch (Exception e) {
throw new RuntimeException(
"Could not override the transforms with URNs " + urnsToOverride, e);
}
}
PipelineValidator.validate(res);
return res;
}
private static RunnerApi.Pipeline elideDeprecatedViews(RunnerApi.Pipeline pipeline) {
Set<String> viewTransforms = new HashSet<>();
Map<String, String> viewOutputsToInputs = new HashMap<>();
pipeline
.getComponents()
.getTransformsMap()
.forEach(
(transformId, transform) -> {
if (transform
.getSpec()
.getUrn()
.equals(PTransformTranslation.CREATE_VIEW_TRANSFORM_URN)) {
viewTransforms.add(transformId);
viewOutputsToInputs.put(
Iterables.getOnlyElement(transform.getOutputsMap().values()),
Iterables.getOnlyElement(transform.getInputsMap().values()));
}
});
Map<String, RunnerApi.PTransform> newTransforms = new HashMap<>();
pipeline
.getComponents()
.getTransformsMap()
.forEach(
(transformId, transform) -> {
RunnerApi.PTransform.Builder transformBuilder = transform.toBuilder();
transform
.getInputsMap()
.forEach(
(key, value) -> {
if (viewOutputsToInputs.containsKey(value)) {
transformBuilder.putInputs(key, viewOutputsToInputs.get(value));
}
});
transform
.getOutputsMap()
.forEach(
(key, value) -> {
if (viewOutputsToInputs.containsKey(value)) {
transformBuilder.putOutputs(key, viewOutputsToInputs.get(value));
}
});
transformBuilder.clearSubtransforms();
transformBuilder.addAllSubtransforms(
transform.getSubtransformsList().stream()
.filter(id -> !viewTransforms.contains(id))
.collect(Collectors.toList()));
newTransforms.put(transformId, transformBuilder.build());
});
RunnerApi.Pipeline.Builder newPipeline = pipeline.toBuilder();
newPipeline.getComponentsBuilder().putAllTransforms(newTransforms);
viewTransforms.forEach(newPipeline.getComponentsBuilder()::removeTransforms);
viewOutputsToInputs.keySet().forEach(newPipeline.getComponentsBuilder()::removePcollections);
newPipeline.clearRootTransformIds();
newPipeline.addAllRootTransformIds(
pipeline.getRootTransformIdsList().stream()
.filter(id -> !viewTransforms.contains(id))
.collect(Collectors.toList()));
return newPipeline.build();
}
} |
Will this result in `long` and `bool` values being formatted as strings — i.e. `"key": "false"` instead of `"key": false`? | public StringBuilder writeKeyAndValue(StringBuilder formatter) {
// Emit the escaped key, always quoted: "key":
formatter.append("\"");
JSON_STRING_ENCODER.quoteAsString(key, formatter);
formatter.append("\":");
String valueStr = null;
if (value != null) {
if (!(value instanceof String)) {
// Non-String values are written WITHOUT surrounding quotes, so Long/Boolean
// render as JSON literals (e.g. "key":false rather than "key":"false").
// NOTE(review): for value types whose toString() is not a valid JSON literal
// this produces invalid JSON — confirm values are limited to numbers/booleans.
JSON_STRING_ENCODER.quoteAsString(value.toString(), formatter);
return formatter;
}
valueStr = (String) value;
} else if (valueSupplier != null) {
// Lazily computed value; the supplier may still yield null.
valueStr = valueSupplier.get();
}
if (valueStr == null) {
// Absent value becomes an unquoted JSON null.
return formatter.append("null");
}
// String values are escaped and quoted.
formatter.append("\"");
JSON_STRING_ENCODER.quoteAsString(valueStr, formatter);
return formatter.append("\"");
} | JSON_STRING_ENCODER.quoteAsString(value.toString(), formatter); | public StringBuilder writeKeyAndValue(StringBuilder formatter) {
formatter.append("\"");
JSON_STRING_ENCODER.quoteAsString(key, formatter);
formatter.append("\":");
String valueStr = null;
if (value != null) {
if (!(value instanceof String)) {
JSON_STRING_ENCODER.quoteAsString(value.toString(), formatter);
return formatter;
}
valueStr = (String) value;
} else if (valueSupplier != null) {
valueStr = valueSupplier.get();
}
if (valueStr == null) {
return formatter.append("null");
}
formatter.append("\"");
JSON_STRING_ENCODER.quoteAsString(valueStr, formatter);
return formatter.append("\"");
} | class ContextKeyValuePair {
private final String key;
private final Object value;
private final Supplier<String> valueSupplier;
ContextKeyValuePair(String key, Object value) {
this.key = key;
this.value = value;
this.valueSupplier = null;
}
ContextKeyValuePair(String key, Supplier<String> valueSupplier) {
this.key = key;
this.value = null;
this.valueSupplier = valueSupplier;
}
/**
* Writes {"key":"value"} json string to provided StringBuilder.
*/
} | class ContextKeyValuePair {
private final String key;
private final Object value;
private final Supplier<String> valueSupplier;
ContextKeyValuePair(String key, Object value) {
this.key = key;
this.value = value;
this.valueSupplier = null;
}
ContextKeyValuePair(String key, Supplier<String> valueSupplier) {
this.key = key;
this.value = null;
this.valueSupplier = valueSupplier;
}
/**
* Writes "key":"value" json string to provided StringBuilder.
*/
} |
we shouldn't eagerly read the stream here. Rather substitute request body with wrapped stream and the validation will happen lazily when stream is read. See https://github.com/Azure/azure-sdk-for-java/blob/400e90a8ceced0f0ad5d586532db6637d29b45d6/sdk/core/azure-core/src/main/java/com/azure/core/implementation/util/LengthValidatingInputStream.java#L48 . | static BinaryData validateLengthSync(final HttpRequest request) {
final BinaryData binaryData = request.getContent();
if (binaryData == null) {
    return binaryData;
}
final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length"));
Long length = binaryData.getLength();
BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData);
if (length == null) {
    if (bdc instanceof FluxByteBufferContent) {
        throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy.");
    } else if (bdc instanceof InputStreamContent) {
        // Do not drain the stream eagerly. Substitute a wrapped stream so the length
        // is validated lazily, as the request body is actually read.
        return BinaryData.fromStream(
            new LengthValidatingInputStream(((InputStreamContent) bdc).toStream(), expectedLength));
    } else {
        byte[] bytes = bdc.toBytes();
        long len = bytes.length;
        if (len > expectedLength) {
            throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE,
                len, expectedLength), len, expectedLength);
        }
        return BinaryData.fromBytes(bytes);
    }
}
// A known length can be validated up front without touching the content.
// (Previously this path returned without any validation at all.)
if (length > expectedLength) {
    throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE,
        length, expectedLength), length, expectedLength);
}
return binaryData;
} | lengthValidatingInputStream.readAllBytes(); | static BinaryData validateLengthSync(final HttpRequest request) {
final BinaryData binaryData = request.getBodyAsBinaryData();
if (binaryData == null) {
return binaryData;
}
final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length"));
Long length = binaryData.getLength();
BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData);
if (length == null) {
if (bdc instanceof FluxByteBufferContent) {
throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy.");
} else if (bdc instanceof InputStreamContent) {
InputStreamContent inputStreamContent = ((InputStreamContent) bdc);
InputStream inputStream = inputStreamContent.toStream();
LengthValidatingInputStream lengthValidatingInputStream =
new LengthValidatingInputStream(inputStream, expectedLength);
return BinaryData.fromStream(lengthValidatingInputStream);
} else {
byte[] b = (bdc).toBytes();
long len = b.length;
if (len > expectedLength) {
throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE,
len, expectedLength), len, expectedLength);
}
return BinaryData.fromBytes(b);
}
} else {
if (length > expectedLength) {
throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE,
length, expectedLength), length, expectedLength);
}
return binaryData;
}
} | class is
if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) {
throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported."));
} | class is
if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) {
throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported."));
} |
Since this method checks whether the type is a valid numeric type, I lifted the nil from the optional type and then checked whether the underlying type is valid. | boolean validNumericTypeExists(BType type) {
// Lift nil from optional types (T?) before checking, so e.g. int? is accepted.
// getSafeType avoids the previous blind (BUnionType) cast and the arbitrary
// "first member" pick, which broke for non-union nullable types.
if (type.isNullable() && type.tag != TypeTags.NIL) {
    type = getSafeType(type, true, false);
}
if (isBasicNumericType(type)) {
    return true;
}
switch (type.tag) {
    case TypeTags.UNION:
        BUnionType unionType = (BUnionType) type;
        Set<BType> memberTypes = unionType.getMemberTypes();
        BType firstTypeInUnion = memberTypes.iterator().next();
        if (firstTypeInUnion.tag == TypeTags.FINITE) {
            // Every finite member must share the value-space type of the first
            // member, and every member must be numeric of that same kind.
            Set<BLangExpression> valSpace = ((BFiniteType) firstTypeInUnion).getValueSpace();
            BType baseExprType = valSpace.iterator().next().getBType();
            for (BType memType : memberTypes) {
                if (memType.tag == TypeTags.FINITE) {
                    if (!checkValueSpaceHasSameType((BFiniteType) memType, baseExprType)) {
                        return false;
                    }
                }
                if (!checkValidNumericTypesInUnion(memType, firstTypeInUnion.tag)) {
                    return false;
                }
            }
        } else {
            for (BType memType : memberTypes) {
                if (!checkValidNumericTypesInUnion(memType, firstTypeInUnion.tag)) {
                    return false;
                }
            }
        }
        return true;
    case TypeTags.FINITE:
        // All values in the finite type's value space must be numeric and share
        // a single type.
        Set<BLangExpression> valSpace = ((BFiniteType) type).getValueSpace();
        BType baseExprType = valSpace.iterator().next().getBType();
        for (BLangExpression expr : valSpace) {
            if (!checkValueSpaceHasSameType((BFiniteType) type, baseExprType)) {
                return false;
            }
            if (!validNumericTypeExists(expr.getBType())) {
                return false;
            }
        }
        return true;
    default:
        return false;
}
} | } | boolean validNumericTypeExists(BType type) {
if (type.isNullable() && type.tag != TypeTags.NIL) {
type = getSafeType(type, true, false);
}
if (isBasicNumericType(type)) {
return true;
}
switch (type.tag) {
case TypeTags.UNION:
BUnionType unionType = (BUnionType) type;
Set<BType> memberTypes = unionType.getMemberTypes();
BType firstTypeInUnion = memberTypes.iterator().next();
if (firstTypeInUnion.tag == TypeTags.FINITE) {
Set<BLangExpression> valSpace = ((BFiniteType) firstTypeInUnion).getValueSpace();
BType baseExprType = valSpace.iterator().next().getBType();
for (BType memType : memberTypes) {
if (memType.tag == TypeTags.FINITE) {
if (!checkValueSpaceHasSameType((BFiniteType) memType, baseExprType)) {
return false;
}
}
if (!checkValidNumericTypesInUnion(memType, firstTypeInUnion.tag)) {
return false;
}
}
} else {
for (BType memType : memberTypes) {
if (!checkValidNumericTypesInUnion(memType, firstTypeInUnion.tag)) {
return false;
}
}
}
return true;
case TypeTags.FINITE:
Set<BLangExpression> valSpace = ((BFiniteType) type).getValueSpace();
BType baseExprType = valSpace.iterator().next().getBType();
for (BLangExpression expr : valSpace) {
if (!checkValueSpaceHasSameType((BFiniteType) type, baseExprType)) {
return false;
}
if (!validNumericTypeExists(expr.getBType())) {
return false;
}
}
return true;
default:
return false;
}
} | class BOrderedTypeVisitor implements BTypeVisitor<BType, Boolean> {
Set<TypePair> unresolvedTypes;
BOrderedTypeVisitor(Set<TypePair> unresolvedTypes) {
this.unresolvedTypes = unresolvedTypes;
}
@Override
public Boolean visit(BType target, BType source) {
if (isSimpleBasicType(source.tag) && isSimpleBasicType(target.tag)) {
return (source == target) || isIntOrStringType(source.tag, target.tag);
}
if (source.tag == TypeTags.FINITE) {
return checkValueSpaceHasSameType(((BFiniteType) source), target);
}
return isSameOrderedType(target, source, this.unresolvedTypes);
}
@Override
public Boolean visit(BArrayType target, BType source) {
if (source.tag != TypeTags.ARRAY) {
return false;
}
BArrayType rhsArrayType = (BArrayType) source;
boolean hasSameOrderedTypeElements = isSameOrderedType(target.eType, rhsArrayType.eType, unresolvedTypes);
if (target.state == BArrayState.OPEN) {
return (rhsArrayType.state == BArrayState.OPEN) && hasSameOrderedTypeElements;
}
return hasSameOrderedTypeElements;
}
@Override
public Boolean visit(BTupleType target, BType source) {
if (source.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(source, target)) {
return false;
}
BTupleType sourceT = (BTupleType) source;
BType sourceRestType = sourceT.restType;
BType targetRestType = target.restType;
int sourceTupleCount = sourceT.tupleTypes.size();
int targetTupleCount = target.tupleTypes.size();
int len = Math.min(sourceTupleCount, targetTupleCount);
for (int i = 0; i < len; i++) {
if (!isSameOrderedType(sourceT.getTupleTypes().get(i), target.tupleTypes.get(i),
this.unresolvedTypes)) {
return false;
}
}
if (sourceTupleCount == targetTupleCount) {
if (sourceRestType == null || targetRestType == null) {
return true;
}
return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes);
}
if (sourceTupleCount > targetTupleCount) {
return checkSameOrderedTypeInTuples(sourceT, sourceTupleCount, targetTupleCount, sourceRestType,
targetRestType);
}
return checkSameOrderedTypeInTuples(target, targetTupleCount, sourceTupleCount, targetRestType,
sourceRestType);
}
private boolean checkSameOrderedTypeInTuples(BTupleType source, int sourceTupleCount,
int targetTupleCount,
BType sourceRestType, BType targetRestType) {
if (targetRestType == null) {
return true;
}
for (int i = targetTupleCount; i < sourceTupleCount; i++) {
if (!isSameOrderedType(source.getTupleTypes().get(i), targetRestType, this.unresolvedTypes)) {
return false;
}
}
if (sourceRestType == null) {
return true;
}
return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType target, BType source) {
if (source.tag != TypeTags.UNION || !hasSameReadonlyFlag(source, target)) {
return checkUnionHasSameType(target.getMemberTypes(), source);
}
BUnionType sUnionType = (BUnionType) source;
LinkedHashSet<BType> sourceTypes = sUnionType.getMemberTypes();
LinkedHashSet<BType> targetTypes = target.getMemberTypes();
if (checkUnionHasAllFiniteOrNilMembers(sourceTypes) &&
checkUnionHasAllFiniteOrNilMembers(targetTypes)) {
if (sourceTypes.contains(symTable.nilType) != targetTypes.contains(symTable.nilType)) {
return false;
}
return checkValueSpaceHasSameType(((BFiniteType) target.getMemberTypes().iterator().next()),
sUnionType.getMemberTypes().iterator().next());
}
if (sUnionType.getMemberTypes().size()
!= target.getMemberTypes().size()) {
return false;
}
return checkSameOrderedTypesInUnionMembers(sourceTypes, targetTypes);
}
private boolean checkSameOrderedTypesInUnionMembers(LinkedHashSet<BType> sourceTypes,
LinkedHashSet<BType> targetTypes) {
for (BType sourceT : sourceTypes) {
boolean foundSameOrderedType = false;
for (BType targetT : targetTypes) {
if (isSameOrderedType(targetT, sourceT, this.unresolvedTypes)) {
foundSameOrderedType = true;
break;
}
}
if (!foundSameOrderedType) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BFiniteType t, BType s) {
return checkValueSpaceHasSameType(t, s);
}
private boolean hasSameReadonlyFlag(BType source, BType target) {
return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
return false;
}
@Override
public Boolean visit(BAnyType t, BType s) {
return false;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
return false;
}
@Override
public Boolean visit(BMapType t, BType s) {
return false;
}
@Override
public Boolean visit(BFutureType t, BType s) {
return false;
}
@Override
public Boolean visit(BXMLType t, BType s) {
return false;
}
@Override
public Boolean visit(BJSONType t, BType s) {
return false;
}
@Override
public Boolean visit(BObjectType t, BType s) {
return false;
}
@Override
public Boolean visit(BRecordType t, BType s) {
return false;
}
@Override
public Boolean visit(BStreamType t, BType s) {
return false;
}
@Override
public Boolean visit(BTableType t, BType s) {
return false;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
return false;
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
return false;
}
@Override
public Boolean visit(BErrorType t, BType s) {
return false;
}
@Override
public Boolean visit(BTypedescType t, BType s) {
return false;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
return false;
}
} | class BOrderedTypeVisitor implements BTypeVisitor<BType, Boolean> {
Set<TypePair> unresolvedTypes;
BOrderedTypeVisitor(Set<TypePair> unresolvedTypes) {
this.unresolvedTypes = unresolvedTypes;
}
@Override
public Boolean visit(BType target, BType source) {
if (isSimpleBasicType(source.tag) && isSimpleBasicType(target.tag)) {
return (source == target) || isIntOrStringType(source.tag, target.tag);
}
if (source.tag == TypeTags.FINITE) {
return checkValueSpaceHasSameType(((BFiniteType) source), target);
}
return isSameOrderedType(target, source, this.unresolvedTypes);
}
@Override
public Boolean visit(BArrayType target, BType source) {
if (source.tag != TypeTags.ARRAY) {
return false;
}
BArrayType rhsArrayType = (BArrayType) source;
boolean hasSameOrderedTypeElements = isSameOrderedType(target.eType, rhsArrayType.eType, unresolvedTypes);
if (target.state == BArrayState.OPEN) {
return (rhsArrayType.state == BArrayState.OPEN) && hasSameOrderedTypeElements;
}
return hasSameOrderedTypeElements;
}
@Override
public Boolean visit(BTupleType target, BType source) {
if (source.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(source, target)) {
return false;
}
BTupleType sourceT = (BTupleType) source;
BType sourceRestType = sourceT.restType;
BType targetRestType = target.restType;
int sourceTupleCount = sourceT.tupleTypes.size();
int targetTupleCount = target.tupleTypes.size();
int len = Math.min(sourceTupleCount, targetTupleCount);
for (int i = 0; i < len; i++) {
if (!isSameOrderedType(sourceT.getTupleTypes().get(i), target.tupleTypes.get(i),
this.unresolvedTypes)) {
return false;
}
}
if (sourceTupleCount == targetTupleCount) {
if (sourceRestType == null || targetRestType == null) {
return true;
}
return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes);
}
if (sourceTupleCount > targetTupleCount) {
return checkSameOrderedTypeInTuples(sourceT, sourceTupleCount, targetTupleCount, sourceRestType,
targetRestType);
}
return checkSameOrderedTypeInTuples(target, targetTupleCount, sourceTupleCount, targetRestType,
sourceRestType);
}
private boolean checkSameOrderedTypeInTuples(BTupleType source, int sourceTupleCount,
int targetTupleCount,
BType sourceRestType, BType targetRestType) {
if (targetRestType == null) {
return true;
}
for (int i = targetTupleCount; i < sourceTupleCount; i++) {
if (!isSameOrderedType(source.getTupleTypes().get(i), targetRestType, this.unresolvedTypes)) {
return false;
}
}
if (sourceRestType == null) {
return true;
}
return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType target, BType source) {
if (source.tag != TypeTags.UNION || !hasSameReadonlyFlag(source, target)) {
return checkUnionHasSameType(target.getMemberTypes(), source);
}
BUnionType sUnionType = (BUnionType) source;
LinkedHashSet<BType> sourceTypes = sUnionType.getMemberTypes();
LinkedHashSet<BType> targetTypes = target.getMemberTypes();
if (checkUnionHasAllFiniteOrNilMembers(sourceTypes) &&
checkUnionHasAllFiniteOrNilMembers(targetTypes)) {
if (sourceTypes.contains(symTable.nilType) != targetTypes.contains(symTable.nilType)) {
return false;
}
return checkValueSpaceHasSameType(((BFiniteType) target.getMemberTypes().iterator().next()),
sUnionType.getMemberTypes().iterator().next());
}
if (sUnionType.getMemberTypes().size()
!= target.getMemberTypes().size()) {
return false;
}
return checkSameOrderedTypesInUnionMembers(sourceTypes, targetTypes);
}
private boolean checkSameOrderedTypesInUnionMembers(LinkedHashSet<BType> sourceTypes,
LinkedHashSet<BType> targetTypes) {
for (BType sourceT : sourceTypes) {
boolean foundSameOrderedType = false;
for (BType targetT : targetTypes) {
if (isSameOrderedType(targetT, sourceT, this.unresolvedTypes)) {
foundSameOrderedType = true;
break;
}
}
if (!foundSameOrderedType) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BFiniteType t, BType s) {
return checkValueSpaceHasSameType(t, s);
}
private boolean hasSameReadonlyFlag(BType source, BType target) {
return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
return false;
}
@Override
public Boolean visit(BAnyType t, BType s) {
return false;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
return false;
}
@Override
public Boolean visit(BMapType t, BType s) {
return false;
}
@Override
public Boolean visit(BFutureType t, BType s) {
return false;
}
@Override
public Boolean visit(BXMLType t, BType s) {
return false;
}
@Override
public Boolean visit(BJSONType t, BType s) {
return false;
}
@Override
public Boolean visit(BObjectType t, BType s) {
return false;
}
@Override
public Boolean visit(BRecordType t, BType s) {
return false;
}
@Override
public Boolean visit(BStreamType t, BType s) {
return false;
}
@Override
public Boolean visit(BTableType t, BType s) {
return false;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
return false;
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
return false;
}
@Override
public Boolean visit(BErrorType t, BType s) {
return false;
}
@Override
public Boolean visit(BTypedescType t, BType s) {
return false;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
return false;
}
} |
We should use `Configuration` so we can pass `STORAGE_CONNECTION_STRING` via system properties or environment variables | public ServiceTest(TOptions options) {
super(options);
String connectionString = System.getenv("STORAGE_CONNECTION_STRING");
if (CoreUtils.isNullOrEmpty(connectionString)) {
System.out.println("Environment variable STORAGE_CONNECTION_STRING must be set");
System.exit(1);
}
shareServiceClient = new ShareServiceClientBuilder().connectionString(connectionString).
buildClient();
shareServiceAsyncClient = new ShareServiceClientBuilder().connectionString(connectionString).
buildAsyncClient();
} | String connectionString = System.getenv("STORAGE_CONNECTION_STRING"); | public ServiceTest(TOptions options) {
super(options);
configuration = Configuration.getGlobalConfiguration().clone();
String connectionString = configuration.get("STORAGE_CONNECTION_STRING");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable STORAGE_CONNECTION_STRING must be set");
}
shareServiceClient = new ShareServiceClientBuilder().connectionString(connectionString).
buildClient();
shareServiceAsyncClient = new ShareServiceClientBuilder().connectionString(connectionString).
buildAsyncClient();
} | class ServiceTest<TOptions extends PerfStressOptions> extends PerfStressTest<TOptions> {
protected final ShareServiceClient shareServiceClient;
protected final ShareServiceAsyncClient shareServiceAsyncClient;
} | class ServiceTest<TOptions extends PerfStressOptions> extends PerfStressTest<TOptions> {
protected static final long MAX_SHARE_SIZE = 4398046511104L;
protected final ShareServiceClient shareServiceClient;
protected final ShareServiceAsyncClient shareServiceAsyncClient;
private final Configuration configuration;
} |
We should throw an exception here if we aren't using Java 9+ so that users learn quickly that it won't work especially if they think it is setup but won't actually work in practice. | public static MemoryMonitor fromOptions(PipelineOptions options) {
DataflowPipelineDebugOptions debugOptions = options.as(DataflowPipelineDebugOptions.class);
DataflowWorkerHarnessOptions workerHarnessOptions =
options.as(DataflowWorkerHarnessOptions.class);
String uploadToGCSPath = debugOptions.getSaveHeapDumpsToGcsPath();
String workerId = workerHarnessOptions.getWorkerId();
boolean canDumpHeap = uploadToGCSPath != null || debugOptions.getDumpHeapOnOOM();
double gcThrashingPercentagePerPeriod = debugOptions.getGCThrashingPercentagePerPeriod();
Duration jfrProfileDuration;
if (uploadToGCSPath != null && debugOptions.getRecordJfrOnGcThrashing()) {
jfrProfileDuration = Duration.ofSeconds(debugOptions.getJfrRecordingDurationSec());
} else {
jfrProfileDuration = null;
}
return new MemoryMonitor(
new SystemGCStatsProvider(),
DEFAULT_SLEEP_TIME_MILLIS,
DEFAULT_SHUT_DOWN_AFTER_NUM_GCTHRASHING,
canDumpHeap,
gcThrashingPercentagePerPeriod,
uploadToGCSPath,
getLoggingDir(),
workerId,
jfrProfileDuration);
} | jfrProfileDuration = Duration.ofSeconds(debugOptions.getJfrRecordingDurationSec()); | public static MemoryMonitor fromOptions(PipelineOptions options) {
DataflowPipelineDebugOptions debugOptions = options.as(DataflowPipelineDebugOptions.class);
DataflowWorkerHarnessOptions workerHarnessOptions =
options.as(DataflowWorkerHarnessOptions.class);
String uploadToGCSPath = debugOptions.getSaveHeapDumpsToGcsPath();
String workerId = workerHarnessOptions.getWorkerId();
boolean canDumpHeap = uploadToGCSPath != null || debugOptions.getDumpHeapOnOOM();
double gcThrashingPercentagePerPeriod = debugOptions.getGCThrashingPercentagePerPeriod();
Duration jfrProfileDuration;
if (uploadToGCSPath != null && debugOptions.getRecordJfrOnGcThrashing()) {
if (Environments.getJavaVersion() == Environments.JavaVersion.java8) {
throw new IllegalArgumentException(
"recordJfrOnGcThrashing is only supported on java 9 and up.");
}
jfrProfileDuration = Duration.ofSeconds(debugOptions.getJfrRecordingDurationSec());
} else {
jfrProfileDuration = null;
}
return new MemoryMonitor(
new SystemGCStatsProvider(),
DEFAULT_SLEEP_TIME_MILLIS,
DEFAULT_SHUT_DOWN_AFTER_NUM_GCTHRASHING,
canDumpHeap,
gcThrashingPercentagePerPeriod,
uploadToGCSPath,
getLoggingDir(),
workerId,
jfrProfileDuration,
Clock.systemUTC());
} | class SystemGCStatsProvider implements GCStatsProvider {
@Override
public long totalGCTimeMilliseconds() {
long inGC = 0;
for (GarbageCollectorMXBean gc : ManagementFactory.getGarbageCollectorMXBeans()) {
inGC += gc.getCollectionTime();
}
return inGC;
}
} | class SystemGCStatsProvider implements GCStatsProvider {
@Override
public long totalGCTimeMilliseconds() {
long inGC = 0;
for (GarbageCollectorMXBean gc : ManagementFactory.getGarbageCollectorMXBeans()) {
inGC += gc.getCollectionTime();
}
return inGC;
}
} |
How about also abstracting the condition to a variable, since it is too long? ```suggestion boolean isAssignedByRowCount = Config.enable_schedule_insert_query_by_row_count && isLoad && scanRangeLocations.getScan_range().isSetInternal_scan_range() && scanRangeLocations.getScan_range().getInternal_scan_range().isSetRow_count() && scanRangeLocations.getScan_range().getInternal_scan_range().getRow_count() > 0; if (isAssignedByRowCount) { ``` | public void computeScanRangeAssignment() throws Exception {
HashMap<TNetworkAddress, Long> assignedRowCountPerHost = Maps.newHashMap();
if (Config.enable_schedule_insert_query_by_row_count && isLoad
&& locations.size() < 10240 && locations.size() > 0
&& locations.get(0).getScan_range().isSetInternal_scan_range()) {
locations.sort(new Comparator<TScanRangeLocations>() {
@Override
public int compare(TScanRangeLocations l, TScanRangeLocations r) {
return Long.compare(r.getScan_range().getInternal_scan_range().getRow_count(),
l.getScan_range().getInternal_scan_range().getRow_count());
}
});
}
for (TScanRangeLocations scanRangeLocations : locations) {
Long minAssignedBytes = Long.MAX_VALUE;
TScanRangeLocation minLocation = null;
for (final TScanRangeLocation location : scanRangeLocations.getLocations()) {
Long assignedBytes = BackendSelector.findOrInsert(assignedRowCountPerHost, location.server, 0L);
if (assignedBytes < minAssignedBytes) {
minAssignedBytes = assignedBytes;
minLocation = location;
}
}
if (minLocation == null) {
throw new UserException("Scan range not found" + backendInfosString(false));
}
if (Config.enable_schedule_insert_query_by_row_count && isLoad
&& scanRangeLocations.getScan_range().isSetInternal_scan_range()
&& scanRangeLocations.getScan_range().getInternal_scan_range().isSetRow_count()
&& scanRangeLocations.getScan_range().getInternal_scan_range().getRow_count() > 0) {
assignedRowCountPerHost.put(minLocation.server, assignedRowCountPerHost.get(minLocation.server)
+ scanRangeLocations.getScan_range().getInternal_scan_range().getRow_count());
} else {
assignedRowCountPerHost.put(minLocation.server, assignedRowCountPerHost.get(minLocation.server) + 1);
}
Reference<Long> backendIdRef = new Reference<>();
TNetworkAddress execHostPort = SimpleScheduler.getHost(minLocation.backend_id,
scanRangeLocations.getLocations(),
idToBackend, backendIdRef);
if (execHostPort == null) {
throw new UserException(FeConstants.BACKEND_NODE_NOT_FOUND_ERROR
+ backendInfosString(false));
}
recordUsedBackend(execHostPort, backendIdRef.getRef());
Map<Integer, List<TScanRangeParams>> scanRanges = BackendSelector.findOrInsert(
assignment, execHostPort, new HashMap<>());
List<TScanRangeParams> scanRangeParamsList = BackendSelector.findOrInsert(
scanRanges, scanNode.getId().asInt(), new ArrayList<>());
TScanRangeParams scanRangeParams = new TScanRangeParams();
scanRangeParams.scan_range = scanRangeLocations.scan_range;
scanRangeParamsList.add(scanRangeParams);
LOG.info("scan range {} is assigned to backend {}",
scanRangeLocations.scan_range, execHostPort);
}
if (LOG.isDebugEnabled()) {
LOG.debug("assignedRowCountPerHost: {}", assignedRowCountPerHost);
}
} | && scanRangeLocations.getScan_range().getInternal_scan_range().getRow_count() > 0) { | public void computeScanRangeAssignment() throws Exception {
HashMap<TNetworkAddress, Long> assignedRowCountPerHost = Maps.newHashMap();
if (locations.size() < 10240 && locations.size() > 0 && isEnableScheduleByRowCnt(locations.get(0))) {
locations.sort(new Comparator<TScanRangeLocations>() {
@Override
public int compare(TScanRangeLocations l, TScanRangeLocations r) {
return Long.compare(r.getScan_range().getInternal_scan_range().getRow_count(),
l.getScan_range().getInternal_scan_range().getRow_count());
}
});
}
for (TScanRangeLocations scanRangeLocations : locations) {
Long minAssignedBytes = Long.MAX_VALUE;
TScanRangeLocation minLocation = null;
for (final TScanRangeLocation location : scanRangeLocations.getLocations()) {
Long assignedBytes = BackendSelector.findOrInsert(assignedRowCountPerHost, location.server, 0L);
if (assignedBytes < minAssignedBytes) {
minAssignedBytes = assignedBytes;
minLocation = location;
}
}
if (minLocation == null) {
throw new UserException("Scan range not found" + backendInfosString(false));
}
if (isEnableScheduleByRowCnt(scanRangeLocations)) {
assignedRowCountPerHost.put(minLocation.server, assignedRowCountPerHost.get(minLocation.server)
+ scanRangeLocations.getScan_range().getInternal_scan_range().getRow_count());
} else {
assignedRowCountPerHost.put(minLocation.server, assignedRowCountPerHost.get(minLocation.server) + 1);
}
Reference<Long> backendIdRef = new Reference<>();
TNetworkAddress execHostPort = SimpleScheduler.getHost(minLocation.backend_id,
scanRangeLocations.getLocations(),
idToBackend, idToComputeNode, backendIdRef);
if (execHostPort == null) {
throw new UserException(FeConstants.BACKEND_NODE_NOT_FOUND_ERROR
+ backendInfosString(false));
}
recordUsedBackend(execHostPort, backendIdRef.getRef());
Map<Integer, List<TScanRangeParams>> scanRanges = BackendSelector.findOrInsert(
assignment, execHostPort, new HashMap<>());
List<TScanRangeParams> scanRangeParamsList = BackendSelector.findOrInsert(
scanRanges, scanNode.getId().asInt(), new ArrayList<>());
TScanRangeParams scanRangeParams = new TScanRangeParams();
scanRangeParams.scan_range = scanRangeLocations.scan_range;
scanRangeParamsList.add(scanRangeParams);
}
if (LOG.isDebugEnabled()) {
LOG.debug("assignedRowCountPerHost: {}", assignedRowCountPerHost);
}
} | class NormalBackendSelector implements BackendSelector {
private final ScanNode scanNode;
private final List<TScanRangeLocations> locations;
private final FragmentScanRangeAssignment assignment;
private boolean isLoad = false;
public NormalBackendSelector(ScanNode scanNode, List<TScanRangeLocations> locations,
FragmentScanRangeAssignment assignment) {
this.scanNode = scanNode;
this.locations = locations;
this.assignment = assignment;
}
public NormalBackendSelector(ScanNode scanNode, List<TScanRangeLocations> locations,
FragmentScanRangeAssignment assignment, boolean isLoad) {
this.scanNode = scanNode;
this.locations = locations;
this.assignment = assignment;
this.isLoad = isLoad;
}
@Override
} | class NormalBackendSelector implements BackendSelector {
private final ScanNode scanNode;
private final List<TScanRangeLocations> locations;
private final FragmentScanRangeAssignment assignment;
private final boolean isLoad;
public NormalBackendSelector(ScanNode scanNode, List<TScanRangeLocations> locations,
FragmentScanRangeAssignment assignment) {
this.scanNode = scanNode;
this.locations = locations;
this.assignment = assignment;
this.isLoad = false;
}
public NormalBackendSelector(ScanNode scanNode, List<TScanRangeLocations> locations,
FragmentScanRangeAssignment assignment, boolean isLoad) {
this.scanNode = scanNode;
this.locations = locations;
this.assignment = assignment;
this.isLoad = isLoad;
}
private boolean isEnableScheduleByRowCnt(TScanRangeLocations scanRangeLocations) {
return Config.enable_schedule_insert_query_by_row_count && isLoad
&& scanRangeLocations.getScan_range().isSetInternal_scan_range()
&& scanRangeLocations.getScan_range().getInternal_scan_range().isSetRow_count()
&& scanRangeLocations.getScan_range().getInternal_scan_range().getRow_count() > 0;
}
@Override
} |
```ballerina public function main() { record {| int j; |} x = {j: 1}; record { never i?; int j; } y = x; }` ``` For each of these, we need to make sure we add runtime tests also. | private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) {
Map<String, BField> rhsFields = new LinkedHashMap<>(rhsType.fields);
for (BField lhsField : lhsType.fields.values()) {
BField rhsField = rhsFields.get(lhsField.name.value);
if (rhsField == null) {
if (!Symbols.isOptional(lhsField.symbol) || lhsField.type.tag == NEVER) {
return false;
}
continue;
}
if (hasIncompatibleReadOnlyFlags(lhsField.symbol.flags, rhsField.symbol.flags)) {
return false;
}
if (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol)) {
return false;
}
if (!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
return false;
}
rhsFields.remove(lhsField.name.value);
}
if (lhsType.sealed) {
for (BField field : rhsFields.values()) {
if (!isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
return false;
}
}
return true;
}
BType lhsRestFieldType = lhsType.restFieldType;
for (BField field : rhsFields.values()) {
if (!isAssignable(field.type, lhsRestFieldType, unresolvedTypes)) {
return false;
}
}
return true;
} | if (!Symbols.isOptional(lhsField.symbol) || lhsField.type.tag == NEVER) { | private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) {
Map<String, BField> rhsFields = new LinkedHashMap<>(rhsType.fields);
for (BField lhsField : lhsType.fields.values()) {
BField rhsField = rhsFields.get(lhsField.name.value);
if (rhsField == null) {
if (!Symbols.isOptional(lhsField.symbol) || isInvalidNeverField(lhsField, rhsType)) {
return false;
}
continue;
}
if (hasIncompatibleReadOnlyFlags(lhsField.symbol.flags, rhsField.symbol.flags)) {
return false;
}
if (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol)) {
return false;
}
if (!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
return false;
}
rhsFields.remove(lhsField.name.value);
}
if (lhsType.sealed) {
for (BField field : rhsFields.values()) {
if (!isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
return false;
}
}
return true;
}
BType lhsRestFieldType = lhsType.restFieldType;
for (BField field : rhsFields.values()) {
if (!isAssignable(field.type, lhsRestFieldType, unresolvedTypes)) {
return false;
}
}
return true;
} | class BOrderedTypeVisitor implements BTypeVisitor<BType, Boolean> {
Set<TypePair> unresolvedTypes;
BOrderedTypeVisitor(Set<TypePair> unresolvedTypes) {
this.unresolvedTypes = unresolvedTypes;
}
@Override
public Boolean visit(BType target, BType source) {
if (isSimpleBasicType(source.tag) && isSimpleBasicType(target.tag)) {
return (source == target) || isIntOrStringType(source.tag, target.tag);
}
if (source.tag == TypeTags.FINITE) {
return checkValueSpaceHasSameType(((BFiniteType) source), target);
}
return isSameOrderedType(target, source, this.unresolvedTypes);
}
@Override
public Boolean visit(BArrayType target, BType source) {
if (source.tag != TypeTags.ARRAY) {
return false;
}
BArrayType rhsArrayType = (BArrayType) source;
boolean hasSameOrderedTypeElements = isSameOrderedType(target.eType, rhsArrayType.eType, unresolvedTypes);
if (target.state == BArrayState.OPEN) {
return (rhsArrayType.state == BArrayState.OPEN) && hasSameOrderedTypeElements;
}
return hasSameOrderedTypeElements;
}
@Override
public Boolean visit(BTupleType target, BType source) {
if (source.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(source, target)) {
return false;
}
BTupleType sourceT = (BTupleType) source;
BType sourceRestType = sourceT.restType;
BType targetRestType = target.restType;
int sourceTupleCount = sourceT.tupleTypes.size();
int targetTupleCount = target.tupleTypes.size();
int len = Math.min(sourceTupleCount, targetTupleCount);
for (int i = 0; i < len; i++) {
if (!isSameOrderedType(sourceT.getTupleTypes().get(i), target.tupleTypes.get(i),
this.unresolvedTypes)) {
return false;
}
}
if (sourceTupleCount == targetTupleCount) {
if (sourceRestType == null || targetRestType == null) {
return true;
}
return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes);
}
if (sourceTupleCount > targetTupleCount) {
return checkSameOrderedTypeInTuples(sourceT, sourceTupleCount, targetTupleCount, sourceRestType,
targetRestType);
}
return checkSameOrderedTypeInTuples(target, targetTupleCount, sourceTupleCount, targetRestType,
sourceRestType);
}
private boolean checkSameOrderedTypeInTuples(BTupleType source, int sourceTupleCount,
int targetTupleCount,
BType sourceRestType, BType targetRestType) {
if (targetRestType == null) {
return true;
}
for (int i = targetTupleCount; i < sourceTupleCount; i++) {
if (!isSameOrderedType(source.getTupleTypes().get(i), targetRestType, this.unresolvedTypes)) {
return false;
}
}
if (sourceRestType == null) {
return true;
}
return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType target, BType source) {
if (source.tag != TypeTags.UNION || !hasSameReadonlyFlag(source, target)) {
return checkUnionHasSameType(target.getMemberTypes(), source);
}
BUnionType sUnionType = (BUnionType) source;
LinkedHashSet<BType> sourceTypes = sUnionType.getMemberTypes();
LinkedHashSet<BType> targetTypes = target.getMemberTypes();
if (checkUnionHasAllFiniteOrNilMembers(sourceTypes) &&
checkUnionHasAllFiniteOrNilMembers(targetTypes)) {
if (sourceTypes.contains(symTable.nilType) != targetTypes.contains(symTable.nilType)) {
return false;
}
return checkValueSpaceHasSameType(((BFiniteType) target.getMemberTypes().iterator().next()),
sUnionType.getMemberTypes().iterator().next());
}
if (sUnionType.getMemberTypes().size()
!= target.getMemberTypes().size()) {
return false;
}
return checkSameOrderedTypesInUnionMembers(sourceTypes, targetTypes);
}
private boolean checkSameOrderedTypesInUnionMembers(LinkedHashSet<BType> sourceTypes,
LinkedHashSet<BType> targetTypes) {
for (BType sourceT : sourceTypes) {
boolean foundSameOrderedType = false;
for (BType targetT : targetTypes) {
if (isSameOrderedType(targetT, sourceT, this.unresolvedTypes)) {
foundSameOrderedType = true;
break;
}
}
if (!foundSameOrderedType) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BFiniteType t, BType s) {
return checkValueSpaceHasSameType(t, s);
}
private boolean hasSameReadonlyFlag(BType source, BType target) {
return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
return false;
}
@Override
public Boolean visit(BAnyType t, BType s) {
return false;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
return false;
}
@Override
public Boolean visit(BMapType t, BType s) {
return false;
}
@Override
public Boolean visit(BFutureType t, BType s) {
return false;
}
@Override
public Boolean visit(BXMLType t, BType s) {
return false;
}
@Override
public Boolean visit(BJSONType t, BType s) {
return false;
}
@Override
public Boolean visit(BObjectType t, BType s) {
return false;
}
@Override
public Boolean visit(BRecordType t, BType s) {
return false;
}
@Override
public Boolean visit(BStreamType t, BType s) {
return false;
}
@Override
public Boolean visit(BTableType t, BType s) {
return false;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
return false;
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
return false;
}
@Override
public Boolean visit(BErrorType t, BType s) {
return false;
}
@Override
public Boolean visit(BTypedescType t, BType s) {
return false;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
return false;
}
} | class BOrderedTypeVisitor implements BTypeVisitor<BType, Boolean> {
Set<TypePair> unresolvedTypes;
BOrderedTypeVisitor(Set<TypePair> unresolvedTypes) {
this.unresolvedTypes = unresolvedTypes;
}
@Override
public Boolean visit(BType target, BType source) {
    // Simple basic types: same-ordered when identical, or when the int/string
    // pairing rule applies.
    if (isSimpleBasicType(source.tag) && isSimpleBasicType(target.tag)) {
        return (source == target) || isIntOrStringType(source.tag, target.tag);
    }
    // A finite source is checked through its value space.
    if (source.tag == TypeTags.FINITE) {
        return checkValueSpaceHasSameType(((BFiniteType) source), target);
    }
    // Fall back to the general recursive check for all other combinations.
    return isSameOrderedType(target, source, this.unresolvedTypes);
}
@Override
public Boolean visit(BArrayType target, BType source) {
    if (source.tag != TypeTags.ARRAY) {
        return false;
    }
    BArrayType rhsArrayType = (BArrayType) source;
    boolean hasSameOrderedTypeElements = isSameOrderedType(target.eType, rhsArrayType.eType, unresolvedTypes);
    // An open (unsized) target array only matches another open array.
    if (target.state == BArrayState.OPEN) {
        return (rhsArrayType.state == BArrayState.OPEN) && hasSameOrderedTypeElements;
    }
    return hasSameOrderedTypeElements;
}
@Override
public Boolean visit(BTupleType target, BType source) {
    if (source.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(source, target)) {
        return false;
    }
    BTupleType sourceT = (BTupleType) source;
    BType sourceRestType = sourceT.restType;
    BType targetRestType = target.restType;
    int sourceTupleCount = sourceT.tupleTypes.size();
    int targetTupleCount = target.tupleTypes.size();
    // Compare the members both tuples have positionally.
    int len = Math.min(sourceTupleCount, targetTupleCount);
    for (int i = 0; i < len; i++) {
        if (!isSameOrderedType(sourceT.getTupleTypes().get(i), target.tupleTypes.get(i),
                this.unresolvedTypes)) {
            return false;
        }
    }
    if (sourceTupleCount == targetTupleCount) {
        // Equal arity: rest types must match only when both are present.
        if (sourceRestType == null || targetRestType == null) {
            return true;
        }
        return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes);
    }
    // Unequal arity: the longer tuple's extra members must match the shorter
    // tuple's rest type; arguments are swapped so the longer one is first.
    if (sourceTupleCount > targetTupleCount) {
        return checkSameOrderedTypeInTuples(sourceT, sourceTupleCount, targetTupleCount, sourceRestType,
                targetRestType);
    }
    return checkSameOrderedTypeInTuples(target, targetTupleCount, sourceTupleCount, targetRestType,
            sourceRestType);
}
// Checks the members of the longer tuple ("source", from index targetTupleCount
// onwards) against the shorter tuple's rest type, then compares the two rest
// types when both exist.
private boolean checkSameOrderedTypeInTuples(BTupleType source, int sourceTupleCount,
                                             int targetTupleCount,
                                             BType sourceRestType, BType targetRestType) {
    // No rest type on the shorter side: nothing constrains the extra members.
    if (targetRestType == null) {
        return true;
    }
    for (int i = targetTupleCount; i < sourceTupleCount; i++) {
        if (!isSameOrderedType(source.getTupleTypes().get(i), targetRestType, this.unresolvedTypes)) {
            return false;
        }
    }
    if (sourceRestType == null) {
        return true;
    }
    return isSameOrderedType(sourceRestType, targetRestType, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType target, BType source) {
    // Non-union (or readonly-mismatched) source: every target member must
    // share the source's type.
    if (source.tag != TypeTags.UNION || !hasSameReadonlyFlag(source, target)) {
        return checkUnionHasSameType(target.getMemberTypes(), source);
    }
    BUnionType sUnionType = (BUnionType) source;
    LinkedHashSet<BType> sourceTypes = sUnionType.getMemberTypes();
    LinkedHashSet<BType> targetTypes = target.getMemberTypes();
    // Special case: both unions consist only of finite/nil members — compare
    // the value spaces of the first finite members, requiring nil on both or
    // neither side.
    if (checkUnionHasAllFiniteOrNilMembers(sourceTypes) &&
            checkUnionHasAllFiniteOrNilMembers(targetTypes)) {
        if (sourceTypes.contains(symTable.nilType) != targetTypes.contains(symTable.nilType)) {
            return false;
        }
        return checkValueSpaceHasSameType(((BFiniteType) target.getMemberTypes().iterator().next()),
                sUnionType.getMemberTypes().iterator().next());
    }
    if (sUnionType.getMemberTypes().size()
            != target.getMemberTypes().size()) {
        return false;
    }
    return checkSameOrderedTypesInUnionMembers(sourceTypes, targetTypes);
}
/**
 * Checks that every member type of the source union has at least one
 * same-ordered counterpart among the target union's member types.
 */
private boolean checkSameOrderedTypesInUnionMembers(LinkedHashSet<BType> sourceTypes,
                                                    LinkedHashSet<BType> targetTypes) {
    for (BType memberType : sourceTypes) {
        boolean hasCounterpart = targetTypes.stream()
                .anyMatch(candidate -> isSameOrderedType(candidate, memberType, this.unresolvedTypes));
        if (!hasCounterpart) {
            // One source member without a same-ordered target member fails the whole check.
            return false;
        }
    }
    return true;
}
@Override
public Boolean visit(BFiniteType t, BType s) {
    // A finite type is same-ordered with another type when its value space
    // shares the other type's ordered type.
    return checkValueSpaceHasSameType(t, s);
}

// True when both types agree on the presence of the READONLY flag.
private boolean hasSameReadonlyFlag(BType source, BType target) {
    return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}

// The remaining overrides cover types with no defined ordering, so none of
// them can be same-ordered with anything.
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
    return false;
}
@Override
public Boolean visit(BAnyType t, BType s) {
    return false;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
    return false;
}
@Override
public Boolean visit(BMapType t, BType s) {
    return false;
}
@Override
public Boolean visit(BFutureType t, BType s) {
    return false;
}
@Override
public Boolean visit(BXMLType t, BType s) {
    return false;
}
@Override
public Boolean visit(BJSONType t, BType s) {
    return false;
}
@Override
public Boolean visit(BObjectType t, BType s) {
    return false;
}
@Override
public Boolean visit(BRecordType t, BType s) {
    return false;
}
@Override
public Boolean visit(BStreamType t, BType s) {
    return false;
}
@Override
public Boolean visit(BTableType t, BType s) {
    return false;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
    return false;
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
    return false;
}
@Override
public Boolean visit(BErrorType t, BType s) {
    return false;
}
@Override
public Boolean visit(BTypedescType t, BType s) {
    return false;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
    return false;
}
} |
I believe these exception messages (here and below) are no longer correct. | public void open(TableInputSplit split) throws IOException {
// Acquire the HBase connection, table, and scan for this split.
initTable();
// NOTE(review): these messages still say "configure()", but initialization is
// performed by initTable() above — the messages look stale; confirm and update.
if (table == null) {
    throw new IOException("The HBase table has not been opened! " +
        "This needs to be done in configure().");
}
if (scan == null) {
    throw new IOException("Scan has not been initialized! " +
        "This needs to be done in configure().");
}
if (split == null) {
    throw new IOException("Input split is null!");
}
logSplitInfo("opening", split);
// Restrict the scan to the split's key range and open the scanner.
currentRow = split.getStartRow();
scan.setStartRow(currentRow);
scan.setStopRow(split.getEndRow());
resultScanner = table.getScanner(scan);
endReached = false;
scannedRows = 0;
} | "This needs to be done in configure()."); | public void open(TableInputSplit split) throws IOException {
// Acquire the HBase connection, table, and scan for this split; initTable()
// is expected to fail by itself if the table cannot be opened.
initTable();
if (split == null) {
    throw new IOException("Input split is null!");
}
logSplitInfo("opening", split);
// Restrict the scan to the split's key range and open the scanner.
currentRow = split.getStartRow();
scan.setStartRow(currentRow);
scan.setStopRow(split.getEndRow());
resultScanner = table.getScanner(scan);
endReached = false;
scannedRows = 0;
} | class AbstractTableInputFormat<T> extends RichInputFormat<T, TableInputSplit> {
protected static final Logger LOG = LoggerFactory.getLogger(AbstractTableInputFormat.class);
private static final long serialVersionUID = 1L;
// True once the scanner has been exhausted for the current split.
protected boolean endReached = false;
// HBase handles are transient: re-created on each worker via initTable().
protected transient Connection connection = null;
protected transient HTable table = null;
protected transient Scan scan = null;
/** HBase iterator wrapper. */
protected ResultScanner resultScanner = null;
// Last row key returned; used to resume the scan after a scanner failure.
protected byte[] currentRow;
protected long scannedRows;
// Hadoop configuration, serialized so the format itself stays serializable.
protected byte[] serializedConfig;
// Serializes the Hadoop configuration eagerly so the instance can be shipped
// to the workers.
public AbstractTableInputFormat(org.apache.hadoop.conf.Configuration hConf) {
    serializedConfig = HBaseConfigurationUtil.serializeConfiguration(hConf);
}
/**
 * Creates a {@link Scan} object and opens the {@link HTable} connection to initialize the HBase table.
 */
protected abstract void initTable();
/**
 * Returns an instance of Scan that retrieves the required subset of records from the HBase table.
 *
 * @return The appropriate instance of Scan for this use case.
 */
protected abstract Scan getScanner();
/**
 * What table is to be read.
 *
 * <p>Per instance of a TableInputFormat derivative only a single table name is possible.
 *
 * @return The name of the table
 */
protected abstract String getTableName();
/**
 * HBase returns an instance of {@link Result}.
 *
 * <p>This method maps the returned {@link Result} instance into the output type {@link T}.
 *
 * @param r The Result instance from HBase that needs to be converted
 * @return The appropriate instance of {@link T} that contains the data of Result.
 */
protected abstract T mapResultToOutType(Result r);
// Intentionally a no-op: all setup happens lazily in initTable()/open().
@Override
public void configure(Configuration parameters) {
}

// Rebuilds the Hadoop configuration from its serialized form, layered over
// the default HBase configuration.
protected org.apache.hadoop.conf.Configuration getHadoopConfiguration() {
    return HBaseConfigurationUtil.deserializeConfiguration(serializedConfig, HBaseConfigurationUtil.getHBaseConfiguration());
}
/**
 * Returns the next record from the open scanner, or {@code null} and sets
 * {@code endReached} when the split is exhausted.
 *
 * <p>If the scanner fails mid-scan, it is closed and re-created starting just
 * after the last successfully read row ({@code currentRow}), so a transient
 * scanner error does not abort or duplicate the split.
 *
 * @param reuse object that may be reused (not used by this implementation)
 * @throws IOException if no scanner is open or the retried read fails
 */
@Override
public T nextRecord(T reuse) throws IOException {
    if (resultScanner == null) {
        throw new IOException("No table result scanner provided!");
    }
    Result res;
    try {
        res = resultScanner.next();
    } catch (Exception e) {
        resultScanner.close();
        LOG.warn("Error after scan of " + scannedRows + " rows. Retry with a new scanner...", e);
        // Resume exclusively after the last delivered row to avoid duplicates.
        scan.withStartRow(currentRow, false);
        resultScanner = table.getScanner(scan);
        res = resultScanner.next();
    }
    if (res != null) {
        scannedRows++;
        currentRow = res.getRow();
        return mapResultToOutType(res);
    }
    endReached = true;
    return null;
}
// Logs a one-line summary of a split; empty start/stop keys are rendered as
// "-" so unbounded splits stay readable.
private void logSplitInfo(String action, TableInputSplit split) {
    String startKey = Bytes.toString(split.getStartRow());
    String stopKey = Bytes.toString(split.getEndRow());
    LOG.info("{} split (this={})[{}|{}|{}|{}]", action, this, split.getSplitNumber(),
            split.getHostnames(), startKey.isEmpty() ? "-" : startKey,
            stopKey.isEmpty() ? "-" : stopKey);
}
// True once nextRecord() has observed the end of the scanner.
@Override
public boolean reachedEnd() throws IOException {
    return endReached;
}
/**
 * Releases the scanner, table, and connection acquired for the current split.
 *
 * <p>Each resource is closed in its own try/finally step so that a failure
 * while closing one still attempts to close the remaining ones. In the
 * previous version a single try block closed all three, so an exception from
 * {@code resultScanner.close()} or {@code table.close()} skipped the later
 * close() calls while the finally block nulled the references — leaking the
 * table and/or connection.
 *
 * @throws IOException the first close failure encountered is propagated
 */
@Override
public void close() throws IOException {
    LOG.info("Closing split (scanned {} rows)", scannedRows);
    currentRow = null;
    try {
        if (resultScanner != null) {
            resultScanner.close();
        }
    } finally {
        resultScanner = null;
        try {
            if (table != null) {
                table.close();
            }
        } finally {
            table = null;
            try {
                if (connection != null) {
                    connection.close();
                }
            } finally {
                connection = null;
            }
        }
    }
}
/**
 * Computes one input split per HBase region that overlaps the configured scan
 * range, clipping each split to the scan's start/stop rows.
 *
 * <p>This method runs on the JobManager, where {@code open()}/{@code close()}
 * are never invoked, so the HBase resources acquired by {@code initTable()}
 * must be released here; the previous version leaked the table and connection
 * on both the success and the failure path. The stale exception messages that
 * referred to {@code configure()} now point at {@code initTable()}, where the
 * initialization actually happens.
 *
 * @param minNumSplits minimum-split-count hint (used only to presize the list)
 * @return one split per region overlapping the scan range
 * @throws IOException if the table cannot be initialized or region metadata read
 */
@Override
public TableInputSplit[] createInputSplits(final int minNumSplits) throws IOException {
    initTable();
    try {
        if (table == null) {
            throw new IOException("The HBase table has not been opened! " +
                "This needs to be done in initTable().");
        }
        if (scan == null) {
            throw new IOException("Scan has not been initialized! " +
                "This needs to be done in initTable().");
        }
        final Pair<byte[][], byte[][]> keys = table.getRegionLocator().getStartEndKeys();
        if (keys == null || keys.getFirst() == null || keys.getFirst().length == 0) {
            throw new IOException("Expecting at least one region.");
        }
        final byte[] startRow = scan.getStartRow();
        final byte[] stopRow = scan.getStopRow();
        // Empty keys mean the scan is unbounded on that side.
        final boolean scanWithNoLowerBound = startRow.length == 0;
        final boolean scanWithNoUpperBound = stopRow.length == 0;
        final List<TableInputSplit> splits = new ArrayList<TableInputSplit>(minNumSplits);
        for (int i = 0; i < keys.getFirst().length; i++) {
            final byte[] startKey = keys.getFirst()[i];
            final byte[] endKey = keys.getSecond()[i];
            final String regionLocation = table.getRegionLocator().getRegionLocation(startKey, false).getHostnamePort();
            if (!includeRegionInScan(startKey, endKey)) {
                continue;
            }
            final String[] hosts = new String[]{regionLocation};
            // The last region has an empty end key.
            boolean isLastRegion = endKey.length == 0;
            if ((scanWithNoLowerBound || isLastRegion || Bytes.compareTo(startRow, endKey) < 0) &&
                    (scanWithNoUpperBound || Bytes.compareTo(stopRow, startKey) > 0)) {
                // Clip the region's key range to the scan's bounds.
                final byte[] splitStart = scanWithNoLowerBound || Bytes.compareTo(startKey, startRow) >= 0 ? startKey : startRow;
                final byte[] splitStop = (scanWithNoUpperBound || Bytes.compareTo(endKey, stopRow) <= 0)
                        && !isLastRegion ? endKey : stopRow;
                int id = splits.size();
                final TableInputSplit split = new TableInputSplit(id, hosts, table.getTableName(), splitStart, splitStop);
                splits.add(split);
            }
        }
        LOG.info("Created " + splits.size() + " splits");
        for (TableInputSplit split : splits) {
            logSplitInfo("created", split);
        }
        return splits.toArray(new TableInputSplit[splits.size()]);
    } finally {
        // Best-effort cleanup; close failures are logged rather than rethrown
        // so already-computed split metadata is not lost.
        try {
            if (table != null) {
                table.close();
            }
        } catch (IOException e) {
            LOG.warn("Exception occurs while closing HBase Table.", e);
        } finally {
            table = null;
        }
        try {
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            LOG.warn("Exception occurs while closing HBase Connection.", e);
        } finally {
            connection = null;
        }
    }
}
/**
 * Test if the given region is to be included in the scan while splitting the regions of a table.
 *
 * @param startKey Start key of the region
 * @param endKey End key of the region
 * @return true, if this region needs to be included as part of the input (default).
 */
protected boolean includeRegionInScan(final byte[] startKey, final byte[] endKey) {
    return true;
}

// Assigns splits to hosts based on locality.
@Override
public InputSplitAssigner getInputSplitAssigner(TableInputSplit[] inputSplits) {
    return new LocatableInputSplitAssigner(inputSplits);
}

// No statistics are computed for HBase input.
@Override
public BaseStatistics getStatistics(BaseStatistics cachedStatistics) {
    return null;
}

@VisibleForTesting
public Connection getConnection() {
    return connection;
}
} | class AbstractTableInputFormat<T> extends RichInputFormat<T, TableInputSplit> {
protected static final Logger LOG = LoggerFactory.getLogger(AbstractTableInputFormat.class);
private static final long serialVersionUID = 1L;
// True once the scanner has been exhausted for the current split.
protected boolean endReached = false;
// HBase handles are transient: re-created on each worker via initTable().
protected transient Connection connection = null;
protected transient HTable table = null;
protected transient Scan scan = null;
/** HBase iterator wrapper. */
protected ResultScanner resultScanner = null;
// Last row key returned; used to resume the scan after a scanner failure.
protected byte[] currentRow;
protected long scannedRows;
// Hadoop configuration, serialized so the format itself stays serializable.
protected byte[] serializedConfig;
// Serializes the Hadoop configuration eagerly so the instance can be shipped
// to the workers.
public AbstractTableInputFormat(org.apache.hadoop.conf.Configuration hConf) {
    serializedConfig = HBaseConfigurationUtil.serializeConfiguration(hConf);
}
/**
 * Creates a {@link Scan} object and opens the {@link HTable} connection to initialize the HBase table.
 *
 * @throws IOException Thrown, if the connection could not be opened due to an I/O problem.
 */
protected abstract void initTable() throws IOException;
/**
 * Returns an instance of Scan that retrieves the required subset of records from the HBase table.
 *
 * @return The appropriate instance of Scan for this use case.
 */
protected abstract Scan getScanner();
/**
 * What table is to be read.
 *
 * <p>Per instance of a TableInputFormat derivative only a single table name is possible.
 *
 * @return The name of the table
 */
protected abstract String getTableName();
/**
 * HBase returns an instance of {@link Result}.
 *
 * <p>This method maps the returned {@link Result} instance into the output type {@link T}.
 *
 * @param r The Result instance from HBase that needs to be converted
 * @return The appropriate instance of {@link T} that contains the data of Result.
 */
protected abstract T mapResultToOutType(Result r);
// Intentionally a no-op: all setup happens lazily in initTable()/open().
@Override
public void configure(Configuration parameters) {
}

// Rebuilds the Hadoop configuration from its serialized form, layered over
// the default HBase configuration.
protected org.apache.hadoop.conf.Configuration getHadoopConfiguration() {
    return HBaseConfigurationUtil.deserializeConfiguration(serializedConfig, HBaseConfigurationUtil.getHBaseConfiguration());
}
/**
 * Returns the next record from the open scanner, or {@code null} and sets
 * {@code endReached} when the split is exhausted.
 *
 * <p>If the scanner fails mid-scan, it is closed and re-created starting just
 * after the last successfully read row ({@code currentRow}), so a transient
 * scanner error does not abort or duplicate the split.
 *
 * @param reuse object that may be reused (not used by this implementation)
 * @throws IOException if no scanner is open or the retried read fails
 */
@Override
public T nextRecord(T reuse) throws IOException {
    if (resultScanner == null) {
        throw new IOException("No table result scanner provided!");
    }
    Result res;
    try {
        res = resultScanner.next();
    } catch (Exception e) {
        resultScanner.close();
        LOG.warn("Error after scan of " + scannedRows + " rows. Retry with a new scanner...", e);
        // Resume exclusively after the last delivered row to avoid duplicates.
        scan.withStartRow(currentRow, false);
        resultScanner = table.getScanner(scan);
        res = resultScanner.next();
    }
    if (res != null) {
        scannedRows++;
        currentRow = res.getRow();
        return mapResultToOutType(res);
    }
    endReached = true;
    return null;
}
// Logs a one-line summary of a split; empty start/stop keys are rendered as
// "-" so unbounded splits stay readable.
private void logSplitInfo(String action, TableInputSplit split) {
    String startKey = Bytes.toString(split.getStartRow());
    String stopKey = Bytes.toString(split.getEndRow());
    LOG.info("{} split (this={})[{}|{}|{}|{}]", action, this, split.getSplitNumber(),
            split.getHostnames(), startKey.isEmpty() ? "-" : startKey,
            stopKey.isEmpty() ? "-" : stopKey);
}
// True once nextRecord() has observed the end of the scanner.
@Override
public boolean reachedEnd() throws IOException {
    return endReached;
}
// Releases the scanner and then the table/connection via closeTable(); the
// scanner reference is always cleared even if closing throws.
@Override
public void close() throws IOException {
    LOG.info("Closing split (scanned {} rows)", scannedRows);
    currentRow = null;
    try {
        if (resultScanner != null) {
            resultScanner.close();
        }
        closeTable();
    } finally {
        resultScanner = null;
    }
}
// Best-effort release of the HBase table and connection. Close failures are
// deliberately logged (not rethrown) so both resources are always attempted
// and the references are always cleared.
public void closeTable() {
    if (table != null) {
        try {
            table.close();
        } catch (IOException e) {
            LOG.warn("Exception occurs while closing HBase Table.", e);
        }
        table = null;
    }
    if (connection != null) {
        try {
            connection.close();
        } catch (IOException e) {
            LOG.warn("Exception occurs while closing HBase Connection.", e);
        }
        connection = null;
    }
}
// Computes one input split per HBase region overlapping the scan range. Runs
// on the JobManager, where open()/close() are never invoked, so the finally
// block releases the resources acquired by initTable().
@Override
public TableInputSplit[] createInputSplits(final int minNumSplits) throws IOException {
    try {
        initTable();
        final Pair<byte[][], byte[][]> keys = table.getRegionLocator().getStartEndKeys();
        if (keys == null || keys.getFirst() == null || keys.getFirst().length == 0) {
            throw new IOException("Expecting at least one region.");
        }
        final byte[] startRow = scan.getStartRow();
        final byte[] stopRow = scan.getStopRow();
        // Empty keys mean the scan is unbounded on that side.
        final boolean scanWithNoLowerBound = startRow.length == 0;
        final boolean scanWithNoUpperBound = stopRow.length == 0;
        final List<TableInputSplit> splits = new ArrayList<>(minNumSplits);
        for (int i = 0; i < keys.getFirst().length; i++) {
            final byte[] startKey = keys.getFirst()[i];
            final byte[] endKey = keys.getSecond()[i];
            final String regionLocation = table.getRegionLocator().getRegionLocation(startKey, false).getHostnamePort();
            if (!includeRegionInScan(startKey, endKey)) {
                continue;
            }
            final String[] hosts = new String[]{regionLocation};
            // The last region has an empty end key.
            boolean isLastRegion = endKey.length == 0;
            if ((scanWithNoLowerBound || isLastRegion || Bytes.compareTo(startRow, endKey) < 0) &&
                    (scanWithNoUpperBound || Bytes.compareTo(stopRow, startKey) > 0)) {
                // Clip the region's key range to the scan's bounds.
                final byte[] splitStart = scanWithNoLowerBound || Bytes.compareTo(startKey, startRow) >= 0 ? startKey : startRow;
                final byte[] splitStop = (scanWithNoUpperBound || Bytes.compareTo(endKey, stopRow) <= 0)
                        && !isLastRegion ? endKey : stopRow;
                int id = splits.size();
                final TableInputSplit split = new TableInputSplit(id, hosts, table.getTableName(), splitStart, splitStop);
                splits.add(split);
            }
        }
        LOG.info("Created " + splits.size() + " splits");
        for (TableInputSplit split : splits) {
            logSplitInfo("created", split);
        }
        return splits.toArray(new TableInputSplit[splits.size()]);
    } finally {
        closeTable();
    }
}
/**
 * Test if the given region is to be included in the scan while splitting the regions of a table.
 *
 * @param startKey Start key of the region
 * @param endKey End key of the region
 * @return true, if this region needs to be included as part of the input (default).
 */
protected boolean includeRegionInScan(final byte[] startKey, final byte[] endKey) {
    return true;
}

// Assigns splits to hosts based on locality.
@Override
public InputSplitAssigner getInputSplitAssigner(TableInputSplit[] inputSplits) {
    return new LocatableInputSplitAssigner(inputSplits);
}

// No statistics are computed for HBase input.
@Override
public BaseStatistics getStatistics(BaseStatistics cachedStatistics) {
    return null;
}

@VisibleForTesting
public Connection getConnection() {
    return connection;
}
} |
Since we need to evaluate the constant expressions to identify enum members that are re-initialized with different values, I moved this check into 'ConstantValueResolver'. | public boolean checkForUniqueSymbol(Location pos, SymbolEnv env, BSymbol symbol) {
// Look the name up in the symbol space that matches the new symbol's kind.
BSymbol foundSym = symTable.notFoundSymbol;
int expSymTag = symbol.tag;
if ((expSymTag & SymTag.IMPORT) == SymTag.IMPORT) {
    foundSym = lookupSymbolInPrefixSpace(env, symbol.name);
} else if ((expSymTag & SymTag.ANNOTATION) == SymTag.ANNOTATION) {
    foundSym = lookupSymbolInAnnotationSpace(env, symbol.name);
} else if ((expSymTag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
    foundSym = lookupSymbolInConstructorSpace(env, symbol.name);
} else if ((expSymTag & SymTag.MAIN) == SymTag.MAIN) {
    foundSym = lookupSymbolForDecl(env, symbol.name, SymTag.MAIN);
}
// Functions may carry a dotted name (e.g. attached functions); retry the
// lookup with the part after the first dot.
if (foundSym == symTable.notFoundSymbol && symbol.tag == SymTag.FUNCTION) {
    int dotPosition = symbol.name.value.indexOf('.');
    if (dotPosition > 0 && dotPosition != symbol.name.value.length()) {
        String funcName = symbol.name.value.substring(dotPosition + 1);
        foundSym = lookupSymbolForDecl(env, names.fromString(funcName), SymTag.MAIN);
    }
}
if (foundSym == symTable.notFoundSymbol) {
    return true;
}
if (!isDistinctSymbol(pos, symbol, foundSym)) {
    return false;
}
if (isRedeclaredSymbol(symbol, foundSym)) {
    Name name = symbol.name;
    // A remote/non-remote mismatch gets its own diagnostic.
    if (Symbols.isRemote(symbol) && !Symbols.isRemote(foundSym)
            || !Symbols.isRemote(symbol) && Symbols.isRemote(foundSym)) {
        dlog.error(pos, DiagnosticErrorCode.UNSUPPORTED_REMOTE_METHOD_NAME_IN_SCOPE, name);
        return false;
    }
    // Finite-typed constants: only report a redeclaration when the two
    // constants differ in value or literal type (identical re-declarations
    // are tolerated here).
    if (symbol.kind == SymbolKind.CONSTANT && symbol.type.tag == TypeTags.FINITE) {
        BConstantSymbol symbolConst = (BConstantSymbol) symbol;
        BConstantSymbol foundSymConst = (BConstantSymbol) foundSym;
        BLangLiteral symbolTypeLiteral = (BLangLiteral) ((BFiniteType)
                symbolConst.type).getValueSpace().iterator().next();
        // Fall back to the source text when the literal has no resolved value.
        String symbolValue = (symbolTypeLiteral.value == null) ?
                symbolTypeLiteral.originalValue : String.valueOf(symbolTypeLiteral.value);
        BLangLiteral foundSymTypeLiteral = (BLangLiteral) ((BFiniteType)
                foundSymConst.type).getValueSpace().iterator().next();
        String foundSymValue = (foundSymTypeLiteral.value == null) ?
                foundSymTypeLiteral.originalValue : String.valueOf(foundSymTypeLiteral.value);
        int symbolLiteralType = symbolConst.literalType.tag;
        int foundLiteralSymType = foundSymConst.literalType.tag;
        if ((!symbolValue.equals(foundSymValue)) || (symbolLiteralType != foundLiteralSymType)) {
            dlog.error(pos, DiagnosticErrorCode.REDECLARED_SYMBOL, name);
        }
    } else {
        dlog.error(pos, DiagnosticErrorCode.REDECLARED_SYMBOL, name);
    }
    return false;
}
if ((foundSym.tag & SymTag.SERVICE) == SymTag.SERVICE) {
    return false;
}
return true;
} | } | public boolean checkForUniqueSymbol(Location pos, SymbolEnv env, BSymbol symbol) {
// Look the name up in the symbol space that matches the new symbol's kind.
BSymbol foundSym = symTable.notFoundSymbol;
int expSymTag = symbol.tag;
if ((expSymTag & SymTag.IMPORT) == SymTag.IMPORT) {
    foundSym = lookupSymbolInPrefixSpace(env, symbol.name);
} else if ((expSymTag & SymTag.ANNOTATION) == SymTag.ANNOTATION) {
    foundSym = lookupSymbolInAnnotationSpace(env, symbol.name);
} else if ((expSymTag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
    foundSym = lookupSymbolInConstructorSpace(env, symbol.name);
} else if ((expSymTag & SymTag.MAIN) == SymTag.MAIN) {
    foundSym = lookupSymbolForDecl(env, symbol.name, SymTag.MAIN);
}
// Functions may carry a dotted name (e.g. attached functions); retry the
// lookup with the part after the first dot.
if (foundSym == symTable.notFoundSymbol && symbol.tag == SymTag.FUNCTION) {
    int dotPosition = symbol.name.value.indexOf('.');
    if (dotPosition > 0 && dotPosition != symbol.name.value.length()) {
        String funcName = symbol.name.value.substring(dotPosition + 1);
        foundSym = lookupSymbolForDecl(env, names.fromString(funcName), SymTag.MAIN);
    }
}
if (foundSym == symTable.notFoundSymbol) {
    return true;
}
if (!isDistinctSymbol(pos, symbol, foundSym)) {
    return false;
}
if (isRedeclaredSymbol(symbol, foundSym)) {
    Name name = symbol.name;
    // A remote/non-remote mismatch gets its own diagnostic.
    if (Symbols.isRemote(symbol) && !Symbols.isRemote(foundSym)
            || !Symbols.isRemote(symbol) && Symbols.isRemote(foundSym)) {
        dlog.error(pos, DiagnosticErrorCode.UNSUPPORTED_REMOTE_METHOD_NAME_IN_SCOPE, name);
        return false;
    }
    // Re-declared constants are not reported here; their value comparison is
    // handled elsewhere (presumably in ConstantValueResolver — see the review
    // note above this method).
    if (symbol.kind != SymbolKind.CONSTANT) {
        dlog.error(pos, DiagnosticErrorCode.REDECLARED_SYMBOL, name);
    }
    return false;
}
if ((foundSym.tag & SymTag.SERVICE) == SymTag.SERVICE) {
    return false;
}
return true;
} | class SymbolResolver extends BLangNodeVisitor {
// Per-CompilerContext cache key for the singleton SymbolResolver instance.
private static final CompilerContext.Key<SymbolResolver> SYMBOL_RESOLVER_KEY =
        new CompilerContext.Key<>();
private SymbolTable symTable;
private Names names;
private BLangDiagnosticLog dlog;
private Types types;
// Mutable visitor state: current environment, last resolved type, and the
// diagnostic code to report on failure.
private SymbolEnv env;
private BType resultType;
private DiagnosticCode diagCode;
private SymbolEnter symbolEnter;
private BLangAnonymousModelHelper anonymousModelHelper;
private BLangMissingNodesHelper missingNodesHelper;
private Unifier unifier;

// Returns the SymbolResolver registered in the context, creating and
// registering one on first use (the constructor performs the put).
public static SymbolResolver getInstance(CompilerContext context) {
    SymbolResolver symbolResolver = context.get(SYMBOL_RESOLVER_KEY);
    if (symbolResolver == null) {
        symbolResolver = new SymbolResolver(context);
    }
    return symbolResolver;
}
// Registers this instance in the context and wires up the per-context
// collaborators.
public SymbolResolver(CompilerContext context) {
    context.put(SYMBOL_RESOLVER_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.names = Names.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.types = Types.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
    this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
    this.unifier = new Unifier();
}
// A clash counts as a redeclaration when both symbols share an owner, or when
// the new symbol re-declares a non-test symbol from within a test package.
private boolean isRedeclaredSymbol(BSymbol symbol, BSymbol foundSym) {
    return hasSameOwner(symbol, foundSym) || isSymbolRedeclaredInTestPackage(symbol, foundSym);
}

// Uniqueness check against the main symbol space only; reports nothing,
// just answers whether the symbol may be defined.
public boolean checkForUniqueSymbol(SymbolEnv env, BSymbol symbol) {
    BSymbol foundSym = lookupSymbolInMainSpace(env, symbol.name);
    if (foundSym == symTable.notFoundSymbol) {
        return true;
    }
    // An error constructor always clashes with the error type of the same name.
    if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
        return false;
    }
    return !hasSameOwner(symbol, foundSym);
}
/**
 * This method will check whether the given symbol that is being defined is unique by only checking its current
 * environment scope.
 *
 * @param pos symbol pos for diagnostic purpose.
 * @param env symbol environment to lookup.
 * @param symbol the symbol that is being defined.
 * @param expSymTag expected tag of the symbol for.
 * @return true if the symbol is unique, false otherwise.
 */
public boolean checkForUniqueSymbolInCurrentScope(Location pos, SymbolEnv env, BSymbol symbol,
                                                  int expSymTag) {
    BSymbol foundSym = lookupSymbolInGivenScope(env, symbol.name, expSymTag);
    if (foundSym == symTable.notFoundSymbol) {
        return true;
    }
    // Found something: distinctness (and any diagnostic) decides the outcome.
    return isDistinctSymbol(pos, symbol, foundSym);
}
/**
 * This method will check whether the symbol being defined is unique comparing it with the found symbol
 * from the scope.
 *
 * @param pos symbol pos for diagnostic purpose.
 * @param symbol symbol that is being defined.
 * @param foundSym symbol that is found from the scope.
 * @return true if the symbol is unique, false otherwise.
 */
private boolean isDistinctSymbol(Location pos, BSymbol symbol, BSymbol foundSym) {
    // An error constructor always clashes with the error type of the same name.
    if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
        return false;
    }
    // Shadowing a built-in (root package) symbol is an error reported here.
    if (isSymbolDefinedInRootPkgLvl(foundSym)) {
        dlog.error(pos, DiagnosticErrorCode.REDECLARED_BUILTIN_SYMBOL, symbol.name);
        return false;
    }
    return true;
}

/**
 * This method will check whether the symbol being defined is unique comparing it with the found symbol
 * from the scope. Unlike the overload above, this variant reports no
 * diagnostics and additionally treats same-owner clashes as non-distinct.
 *
 * @param symbol symbol that is being defined.
 * @param foundSym symbol that is found from the scope.
 * @return true if the symbol is unique, false otherwise.
 */
private boolean isDistinctSymbol(BSymbol symbol, BSymbol foundSym) {
    if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
        return false;
    }
    if (isSymbolDefinedInRootPkgLvl(foundSym)) {
        return false;
    }
    return !hasSameOwner(symbol, foundSym);
}
// Decides whether two clashing symbols effectively share an owner: identical
// owners, two package-level symbols of the same package, or a lambda/let
// symbol clashing with one owned by an enclosing invokable.
private boolean hasSameOwner(BSymbol symbol, BSymbol foundSym) {
    if (foundSym.owner == symbol.owner) {
        return true;
    }
    boolean bothPackageLevel = (foundSym.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE
            && (symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE;
    if (bothPackageLevel && foundSym.pkgID.equals(symbol.pkgID)) {
        return true;
    }
    boolean foundInInvokable = (foundSym.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE;
    if (foundInInvokable && Symbols.isFlagOn(symbol.owner.flags, Flags.LAMBDA)) {
        return true;
    }
    return foundInInvokable && (symbol.owner.tag & SymTag.LET) == SymTag.LET;
}
// A redeclaration across the test boundary: the new symbol lives in a
// testable module while the clashing symbol does not.
private boolean isSymbolRedeclaredInTestPackage(BSymbol symbol, BSymbol foundSym) {
    return Symbols.isFlagOn(symbol.owner.flags, Flags.TESTABLE)
            && !Symbols.isFlagOn(foundSym.owner.flags, Flags.TESTABLE);
}
// True for variable-name symbols that belong to the root (built-in) package.
private boolean isSymbolDefinedInRootPkgLvl(BSymbol foundSym) {
    return symTable.rootPkgSymbol.pkgID.equals(foundSym.pkgID) &&
            (foundSym.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME;
}
/**
 * Lookup the symbol using given name in the given environment scope only.
 *
 * @param env environment to lookup the symbol.
 * @param name name of the symbol to lookup.
 * @param expSymTag expected tag of the symbol.
 * @return if a symbol is found return it.
 */
public BSymbol lookupSymbolInGivenScope(SymbolEnv env, Name name, int expSymTag) {
    ScopeEntry entry = env.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        // Root-package variable names are returned regardless of the expected
        // tag, so built-in shadowing can be detected by the caller.
        if (symTable.rootPkgSymbol.pkgID.equals(entry.symbol.pkgID) &&
                (entry.symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
            return entry.symbol;
        }
        if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
            return entry.symbol;
        }
        entry = entry.next;
    }
    return symTable.notFoundSymbol;
}

// Uniqueness check against a scope's member symbols; logs REDECLARED_SYMBOL
// when a clash is found.
public boolean checkForUniqueMemberSymbol(Location pos, SymbolEnv env, BSymbol symbol) {
    BSymbol foundSym = lookupMemberSymbol(pos, env.scope, env, symbol.name, symbol.tag);
    if (foundSym != symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.REDECLARED_SYMBOL, symbol.name);
        return false;
    }
    return true;
}
// Resolves a binary operator symbol from the root scope by operand types.
public BSymbol resolveBinaryOperator(OperatorKind opKind,
                                     BType lhsType,
                                     BType rhsType) {
    return resolveOperator(names.fromString(opKind.value()), Lists.of(lhsType, rhsType));
}

// Synthesizes an equality operator symbol (boolean result) for the given operands.
BSymbol createEqualityOperator(OperatorKind opKind, BType lhsType, BType rhsType) {
    List<BType> paramTypes = Lists.of(lhsType, rhsType);
    BType retType = symTable.booleanType;
    BInvokableType opType = new BInvokableType(paramTypes, retType, null);
    return new BOperatorSymbol(names.fromString(opKind.value()), null, opType, null, symTable.builtinPos, VIRTUAL);
}

// Resolves a unary operator symbol; `pos` is currently unused here.
public BSymbol resolveUnaryOperator(Location pos,
                                    OperatorKind opKind,
                                    BType type) {
    return resolveOperator(names.fromString(opKind.value()), Lists.of(type));
}

// Looks an operator up in the root scope by name and operand type list.
public BSymbol resolveOperator(Name name, List<BType> types) {
    ScopeEntry entry = symTable.rootScope.lookup(name);
    return resolveOperator(entry, types);
}

// Synthesizes a comparison operator symbol (boolean result).
BSymbol createBinaryComparisonOperator(OperatorKind opKind, BType lhsType, BType rhsType) {
    List<BType> paramTypes = Lists.of(lhsType, rhsType);
    BInvokableType opType = new BInvokableType(paramTypes, symTable.booleanType, null);
    return new BOperatorSymbol(names.fromString(opKind.value()), null, opType, null, symTable.builtinPos, VIRTUAL);
}

// Synthesizes a binary operator symbol with an explicit return type.
BSymbol createBinaryOperator(OperatorKind opKind, BType lhsType, BType rhsType, BType retType) {
    List<BType> paramTypes = Lists.of(lhsType, rhsType);
    BInvokableType opType = new BInvokableType(paramTypes, retType, null);
    return new BOperatorSymbol(names.fromString(opKind.value()), null, opType, null, symTable.builtinPos, VIRTUAL);
}
// Resolves a package by alias; an empty alias means the enclosing package.
// Logs UNDEFINED_MODULE when the alias cannot be resolved.
public BSymbol resolvePkgSymbol(Location pos, SymbolEnv env, Name pkgAlias) {
    if (pkgAlias == Names.EMPTY) {
        return env.enclPkg.symbol;
    }
    BSymbol pkgSymbol = lookupSymbolInPrefixSpace(env, pkgAlias);
    if (pkgSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
    }
    return pkgSymbol;
}

// Resolves a prefix (XML namespace or import) for a given compilation unit,
// walking outwards through enclosing environments; marks a matched import as
// used.
public BSymbol resolvePrefixSymbol(SymbolEnv env, Name pkgAlias, Name compUnit) {
    if (pkgAlias == Names.EMPTY) {
        return env.enclPkg.symbol;
    }
    ScopeEntry entry = env.scope.lookup(pkgAlias);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & SymTag.XMLNS) == SymTag.XMLNS) {
            return entry.symbol;
        }
        // Imports are per compilation unit, so the unit must match too.
        if ((entry.symbol.tag & SymTag.IMPORT) == SymTag.IMPORT &&
                ((BPackageSymbol) entry.symbol).compUnit.equals(compUnit)) {
            ((BPackageSymbol) entry.symbol).isUsed = true;
            return entry.symbol;
        }
        entry = entry.next;
    }
    if (env.enclEnv != null) {
        return resolvePrefixSymbol(env.enclEnv, pkgAlias, compUnit);
    }
    return symTable.notFoundSymbol;
}
public BSymbol resolveAnnotation(Location pos, SymbolEnv env, Name pkgAlias, Name annotationName) {
return this.lookupAnnotationSpaceSymbolInPackage(pos, env, pkgAlias, annotationName);
}
public BSymbol resolveStructField(Location location, SymbolEnv env, Name fieldName,
BTypeSymbol structSymbol) {
return lookupMemberSymbol(location, structSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
/**
 * Resolves a field of an object type by looking it up directly in the object type
 * symbol's own scope (no enclosing scopes).
 */
public BSymbol resolveObjectField(Location location, SymbolEnv env, Name fieldName,
                                  BTypeSymbol objectSymbol) {
    return lookupMemberSymbol(location, objectSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
/**
 * Resolves a method of an object type from the object symbol's scope.
 * NOTE(review): the lookup uses SymTag.VARIABLE rather than a function tag — presumably
 * methods are registered under the variable tag as well; confirm against symbol entry.
 */
public BSymbol resolveObjectMethod(Location pos, SymbolEnv env, Name fieldName,
                                   BObjectTypeSymbol objectSymbol) {
    return lookupMemberSymbol(pos, objectSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
/**
 * Resolves the given type node in the given environment, reporting {@code UNKNOWN_TYPE}
 * if resolution fails.
 */
public BType resolveTypeNode(BLangType typeNode, SymbolEnv env) {
    return resolveTypeNode(typeNode, env, DiagnosticErrorCode.UNKNOWN_TYPE);
}
/**
 * Resolves the given type node to its semantic type via this visitor, then applies
 * optional-type ({@code ?}) handling to the result and caches it on the node.
 *
 * @param typeNode type node to resolve
 * @param env      environment to resolve in
 * @param diagCode diagnostic code to report when the type cannot be resolved
 * @return the resolved type (also set on {@code typeNode})
 */
public BType resolveTypeNode(BLangType typeNode, SymbolEnv env, DiagnosticCode diagCode) {
    // The resolver is reentrant: save and restore visitor state around the visit.
    SymbolEnv prevEnv = this.env;
    DiagnosticCode preDiagCode = this.diagCode;
    this.env = env;
    this.diagCode = diagCode;
    typeNode.accept(this);
    this.env = prevEnv;
    this.diagCode = preDiagCode;

    if (this.resultType != symTable.noType) {
        if (typeNode.nullable && this.resultType.tag == TypeTags.UNION) {
            // T? where T is already a union: fold nil into the existing union.
            BUnionType unionType = (BUnionType) this.resultType;
            unionType.add(symTable.nilType);
        } else if (typeNode.nullable && resultType.tag != TypeTags.JSON && resultType.tag != TypeTags.ANY) {
            // json and any are not wrapped — presumably they already admit nil values.
            this.resultType = BUnionType.create(null, resultType, symTable.nilType);
        }
    }

    typeNode.setBType(resultType);
    return resultType;
}
/**
 * Return the symbol associated with the given name in the current package. This method first searches the symbol in
 * the current scope and proceeds to the enclosing scope if it is not there in the current scope. This process
 * continues until the symbol is found or the root scope is reached. This method is mainly meant for checking
 * whether a given symbol is already defined in the scope hierarchy.
 *
 * @param env current symbol environment
 * @param name symbol name
 * @param expSymTag expected symbol type/tag
 * @return resolved symbol
 */
private BSymbol lookupSymbolForDecl(SymbolEnv env, Name name, int expSymTag) {
    ScopeEntry entry = env.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & expSymTag) == expSymTag) {
            return entry.symbol;
        }
        entry = entry.next;
    }
    if (env.enclEnv != null) {
        // NOTE(review): enclosing scopes are searched via lookupSymbol, which additionally
        // skips record fields referenced from within a record type definition.
        return lookupSymbol(env.enclEnv, name, expSymTag);
    }
    return symTable.notFoundSymbol;
}
/**
 * Returns the symbol associated with the given name, searching the current scope first and
 * then each enclosing scope until the symbol is found or the root scope is exhausted.
 * When looking up from within a record type definition, record fields are ignored so that
 * default value expressions cannot refer to other record fields.
 *
 * @param env       current symbol environment
 * @param name      symbol name
 * @param expSymTag expected symbol type/tag
 * @return the resolved symbol, or {@code symTable.notFoundSymbol}
 */
private BSymbol lookupSymbol(SymbolEnv env, Name name, int expSymTag) {
    for (ScopeEntry entry = env.scope.lookup(name); entry != NOT_FOUND_ENTRY; entry = entry.next) {
        BSymbol symbol = entry.symbol;
        if ((symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(symbol, env)) {
            return symbol;
        }
    }
    return env.enclEnv != null ? lookupSymbol(env.enclEnv, name, expSymTag) : symTable.notFoundSymbol;
}
/**
 * Checks whether the given symbol is a record field that is being referred to from inside a
 * record type definition (not necessarily the record that owns the field).
 *
 * @param symbol symbol to test
 * @param env    environment in which the symbol was found
 * @return {@code true} when the above condition holds
 */
private boolean isFieldRefFromWithinARecord(BSymbol symbol, SymbolEnv env) {
    boolean ownedByRecord = (symbol.owner.tag & SymTag.RECORD) == SymTag.RECORD;
    boolean insideRecordTypeDef = env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE;
    return ownedByRecord && insideRecordTypeDef;
}
/** Looks up a symbol in the main symbol space. */
public BSymbol lookupSymbolInMainSpace(SymbolEnv env, Name name) {
    return lookupSymbol(env, name, SymTag.MAIN);
}
/** Looks up a symbol in the annotation symbol space. */
public BSymbol lookupSymbolInAnnotationSpace(SymbolEnv env, Name name) {
    return lookupSymbol(env, name, SymTag.ANNOTATION);
}
/** Looks up a symbol in the prefix (import/xmlns) symbol space. */
public BSymbol lookupSymbolInPrefixSpace(SymbolEnv env, Name name) {
    return lookupSymbol(env, name, SymTag.IMPORT);
}
/** Looks up a symbol in the constructor symbol space. */
public BSymbol lookupSymbolInConstructorSpace(SymbolEnv env, Name name) {
    return lookupSymbol(env, name, SymTag.CONSTRUCTOR);
}
/**
 * Looks up a langlib method for the given receiver type, dispatching on the type tag to
 * the corresponding lang.* module. If no match is found, falls back to lang.value (for
 * non-object types) and then to the internal lang module.
 *
 * @param type receiver type of the langlib call
 * @param name method name to look up
 * @return the resolved method symbol, or {@code symTable.notFoundSymbol}
 */
public BSymbol lookupLangLibMethod(BType type, Name name) {
    if (symTable.langAnnotationModuleSymbol == null) {
        // Lang modules are not loaded yet — nothing to look up.
        return symTable.notFoundSymbol;
    }
    BSymbol bSymbol;
    switch (type.tag) {
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
            bSymbol = lookupMethodInModule(symTable.langArrayModuleSymbol, name, env);
            break;
        case TypeTags.DECIMAL:
            bSymbol = lookupMethodInModule(symTable.langDecimalModuleSymbol, name, env);
            break;
        case TypeTags.ERROR:
            bSymbol = lookupMethodInModule(symTable.langErrorModuleSymbol, name, env);
            break;
        case TypeTags.FLOAT:
            bSymbol = lookupMethodInModule(symTable.langFloatModuleSymbol, name, env);
            break;
        case TypeTags.FUTURE:
            bSymbol = lookupMethodInModule(symTable.langFutureModuleSymbol, name, env);
            break;
        case TypeTags.INT:
        case TypeTags.SIGNED32_INT:
        case TypeTags.SIGNED16_INT:
        case TypeTags.SIGNED8_INT:
        case TypeTags.UNSIGNED32_INT:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED8_INT:
            // All integer subtypes share the lang.int module.
            bSymbol = lookupMethodInModule(symTable.langIntModuleSymbol, name, env);
            break;
        case TypeTags.MAP:
        case TypeTags.RECORD:
            bSymbol = lookupMethodInModule(symTable.langMapModuleSymbol, name, env);
            break;
        case TypeTags.OBJECT:
            bSymbol = lookupMethodInModule(symTable.langObjectModuleSymbol, name, env);
            break;
        case TypeTags.STREAM:
            bSymbol = lookupMethodInModule(symTable.langStreamModuleSymbol, name, env);
            break;
        case TypeTags.TABLE:
            bSymbol = lookupMethodInModule(symTable.langTableModuleSymbol, name, env);
            break;
        case TypeTags.STRING:
        case TypeTags.CHAR_STRING:
            bSymbol = lookupMethodInModule(symTable.langStringModuleSymbol, name, env);
            break;
        case TypeTags.TYPEDESC:
            bSymbol = lookupMethodInModule(symTable.langTypedescModuleSymbol, name, env);
            break;
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
        case TypeTags.XML_COMMENT:
        case TypeTags.XML_PI:
            bSymbol = lookupMethodInModule(symTable.langXmlModuleSymbol, name, env);
            break;
        case TypeTags.XML_TEXT:
            // xml:Text tries lang.xml first and then falls back to lang.string methods.
            bSymbol = lookupMethodInModule(symTable.langXmlModuleSymbol, name, env);
            if (bSymbol == symTable.notFoundSymbol) {
                bSymbol = lookupMethodInModule(symTable.langStringModuleSymbol, name, env);
            }
            break;
        case TypeTags.BOOLEAN:
            bSymbol = lookupMethodInModule(symTable.langBooleanModuleSymbol, name, env);
            break;
        case TypeTags.UNION:
            // Dispatch on the first member, but only if the whole union is a subtype
            // of that member's basic type; otherwise no single langlib module applies.
            Iterator<BType> itr = ((BUnionType) type).getMemberTypes().iterator();
            if (!itr.hasNext()) {
                throw new IllegalArgumentException(
                        format("Union type '%s' does not have member types", type.toString()));
            }
            BType member = itr.next();
            if (types.isSubTypeOfBaseType(type, member.tag)) {
                bSymbol = lookupLangLibMethod(member, name);
            } else {
                bSymbol = symTable.notFoundSymbol;
            }
            break;
        default:
            bSymbol = symTable.notFoundSymbol;
    }
    if (bSymbol == symTable.notFoundSymbol && type.tag != TypeTags.OBJECT) {
        bSymbol = lookupMethodInModule(symTable.langValueModuleSymbol, name, env);
    }
    if (bSymbol == symTable.notFoundSymbol) {
        bSymbol = lookupMethodInModule(symTable.langInternalModuleSymbol, name, env);
    }
    return bSymbol;
}
/**
 * Recursively analyse the symbol env hierarchy to find the closure variable symbol that is
 * being resolved.
 *
 * @param env symbol env to analyse and find the closure variable
 * @param name name of the symbol to lookup
 * @param expSymTag symbol tag
 * @return the resolved symbol, or {@code symTable.notFoundSymbol}
 */
public BSymbol lookupClosureVarSymbol(SymbolEnv env, Name name, int expSymTag) {
    ScopeEntry entry = env.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        // Symbols from the root (built-in) package match on the variable-name tag alone.
        if (symTable.rootPkgSymbol.pkgID.equals(entry.symbol.pkgID) &&
                (entry.symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
            return entry.symbol;
        }
        if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
            return entry.symbol;
        }
        entry = entry.next;
    }
    // Stop when there is no enclosing env or the enclosing env has no associated node.
    if (env.enclEnv == null || env.enclEnv.node == null) {
        return symTable.notFoundSymbol;
    }
    return lookupClosureVarSymbol(env.enclEnv, name, expSymTag);
}
/**
 * Looks up a main-space symbol, resolving the module prefix first when present. With an
 * empty prefix the lookup walks the current scope chain; otherwise the named module's
 * scope is searched directly.
 */
public BSymbol lookupMainSpaceSymbolInPackage(Location pos, SymbolEnv env, Name pkgAlias, Name name) {
    if (pkgAlias == Names.EMPTY) {
        return lookupSymbolInMainSpace(env, name);
    }
    Name compUnitName = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSymbol = resolvePrefixSymbol(env, pkgAlias, compUnitName);
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.MAIN);
}
/**
 * Looks up a prefix-space (import/xmlns) symbol, resolving the module prefix first when
 * present.
 */
public BSymbol lookupPrefixSpaceSymbolInPackage(Location pos, SymbolEnv env, Name pkgAlias, Name name) {
    if (pkgAlias == Names.EMPTY) {
        return lookupSymbolInPrefixSpace(env, name);
    }
    Name compUnitName = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSymbol = resolvePrefixSymbol(env, pkgAlias, compUnitName);
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.IMPORT);
}
/**
 * Looks up an annotation-space symbol, resolving the module prefix first when present.
 */
public BSymbol lookupAnnotationSpaceSymbolInPackage(Location pos, SymbolEnv env, Name pkgAlias, Name name) {
    if (pkgAlias == Names.EMPTY) {
        return lookupSymbolInAnnotationSpace(env, name);
    }
    Name compUnitName = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSymbol = resolvePrefixSymbol(env, pkgAlias, compUnitName);
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.ANNOTATION);
}
/**
 * Looks up a constructor-space symbol, resolving the module prefix first when present.
 */
public BSymbol lookupConstructorSpaceSymbolInPackage(Location pos, SymbolEnv env, Name pkgAlias, Name name) {
    if (pkgAlias == Names.EMPTY) {
        return lookupSymbolInConstructorSpace(env, name);
    }
    Name compUnitName = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSymbol = resolvePrefixSymbol(env, pkgAlias, compUnitName);
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.CONSTRUCTOR);
}
/**
 * Looks up a function with the given name in the given module's scope. The first entry
 * carrying the function tag decides the outcome: it is returned if accessible from the
 * environment, otherwise not-found is returned.
 */
public BSymbol lookupMethodInModule(BPackageSymbol moduleSymbol, Name name, SymbolEnv env) {
    for (ScopeEntry entry = moduleSymbol.scope.lookup(name); entry != NOT_FOUND_ENTRY; entry = entry.next) {
        BSymbol symbol = entry.symbol;
        if ((symbol.tag & SymTag.FUNCTION) != SymTag.FUNCTION) {
            continue;
        }
        return isMemberAccessAllowed(env, symbol) ? symbol : symTable.notFoundSymbol;
    }
    return symTable.notFoundSymbol;
}
/**
 * Returns the symbol with the given name and expected tag, looking ONLY in the given scope
 * (enclosing scopes are not searched). When a matching symbol exists but is not accessible
 * from the given environment, an error is logged and not-found is returned.
 *
 * @param pos       position to report accessibility errors at
 * @param scope     scope to search
 * @param env       environment the reference occurs in
 * @param name      symbol name
 * @param expSymTag expected symbol type/tag
 * @return the resolved symbol, or {@code symTable.notFoundSymbol}
 */
public BSymbol lookupMemberSymbol(Location pos,
                                  Scope scope,
                                  SymbolEnv env,
                                  Name name,
                                  int expSymTag) {
    for (ScopeEntry entry = scope.lookup(name); entry != NOT_FOUND_ENTRY; entry = entry.next) {
        BSymbol symbol = entry.symbol;
        if ((symbol.tag & expSymTag) != expSymTag) {
            continue;
        }
        if (!isMemberAccessAllowed(env, symbol)) {
            dlog.error(pos, DiagnosticErrorCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name);
            return symTable.notFoundSymbol;
        }
        return symbol;
    }
    return symTable.notFoundSymbol;
}
/**
 * Resolves and returns all XML namespaces visible to the given environment.
 *
 * @param env environment to collect visible namespaces for
 * @return map of namespace symbols, keyed by prefix, in insertion order
 */
public Map<Name, BXMLNSSymbol> resolveAllNamespaces(SymbolEnv env) {
    Map<Name, BXMLNSSymbol> visibleNamespaces = new LinkedHashMap<>();
    addNamespacesInScope(visibleNamespaces, env);
    return visibleNamespaces;
}
/**
 * Resolves the built-in error type from the root package scope and caches it (together
 * with its detail map type) on the symbol table.
 * NOTE(review): name is spelled "boostrap" (sic) — renaming would break external callers.
 *
 * @throws IllegalStateException if the built-in error type cannot be found
 */
public void boostrapErrorType() {
    ScopeEntry entry = symTable.rootPkgSymbol.scope.lookup(Names.ERROR);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        symTable.errorType = (BErrorType) entry.symbol.type;
        symTable.detailType = (BMapType) symTable.errorType.detailType;
        return;
    }
    throw new IllegalStateException("built-in error not found ?");
}
/** Defines the built-in operators on the symbol table. */
public void defineOperators() {
    symTable.defineOperators();
}
/**
 * Resolves the 'anydata' type from the annotations lang module, wraps it in a BAnydataType,
 * and caches it (plus the anydata|readonly union) on the symbol table. The scope entry is
 * rewired to point at the new type and marked as built-in.
 *
 * @throws IllegalStateException if the built-in 'anydata' type cannot be found
 */
public void bootstrapAnydataType() {
    ScopeEntry entry = symTable.langAnnotationModuleSymbol.scope.lookup(Names.ANYDATA);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        BUnionType type = (BUnionType) entry.symbol.type;
        symTable.anydataType = new BAnydataType(type);
        symTable.anydataOrReadonly = BUnionType.create(null, symTable.anydataType, symTable.readonlyType);
        entry.symbol.type = symTable.anydataType;
        entry.symbol.origin = BUILTIN;
        // Give the new anydata type its own symbol, owned by the root package.
        symTable.anydataType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.ANYDATA,
                PackageID.ANNOTATIONS, symTable.anydataType, symTable.rootPkgSymbol, symTable.builtinPos, BUILTIN);
        return;
    }
    throw new IllegalStateException("built-in 'anydata' type not found");
}
/**
 * Resolves the 'json' type from the annotations lang module, wraps it in a BJSONType, and
 * caches it on the symbol table. The scope entry is rewired to the new type and marked as
 * built-in.
 *
 * @throws IllegalStateException if the built-in 'json' type cannot be found
 */
public void bootstrapJsonType() {
    ScopeEntry entry = symTable.langAnnotationModuleSymbol.scope.lookup(Names.JSON);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        BUnionType type = (BUnionType) entry.symbol.type;
        symTable.jsonType = new BJSONType(type);
        symTable.jsonType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.JSON, PackageID.ANNOTATIONS,
                symTable.jsonType, symTable.langAnnotationModuleSymbol, symTable.builtinPos, BUILTIN);
        entry.symbol.type = symTable.jsonType;
        entry.symbol.origin = BUILTIN;
        return;
    }
    throw new IllegalStateException("built-in 'json' type not found");
}
/**
 * Resolves lang.value:Cloneable and rebuilds the error/detail types and several derived
 * types (error?, any|error, map/array of any|error, pure type) on top of it.
 * When the lang.value module is not available, falls back to rewiring the internal
 * Cloneable entry in the root package scope to the already-cached cloneable type.
 *
 * @throws IllegalStateException if lang.value is loaded but Cloneable cannot be found
 */
public void bootstrapCloneableType() {
    if (symTable.langValueModuleSymbol != null) {
        ScopeEntry entry = symTable.langValueModuleSymbol.scope.lookup(Names.CLONEABLE);
        while (entry != NOT_FOUND_ENTRY) {
            if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
                entry = entry.next;
                continue;
            }
            symTable.cloneableType = (BUnionType) entry.symbol.type;
            symTable.cloneableType.tsymbol =
                    new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.CLONEABLE,
                            PackageID.VALUE, symTable.cloneableType, symTable.langValueModuleSymbol,
                            symTable.builtinPos, BUILTIN);
            // Error detail is a map of Cloneable; the error type is rebuilt on top of it.
            symTable.detailType = new BMapType(TypeTags.MAP, symTable.cloneableType, null);
            symTable.errorType = new BErrorType(null, symTable.detailType);
            symTable.errorType.tsymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR,
                    symTable.rootPkgSymbol.pkgID, symTable.errorType, symTable.rootPkgSymbol, symTable.builtinPos
                    , BUILTIN);
            // Derived convenience types that depend on the freshly built error type.
            symTable.errorOrNilType = BUnionType.create(null, symTable.errorType, symTable.nilType);
            symTable.anyOrErrorType = BUnionType.create(null, symTable.anyType, symTable.errorType);
            symTable.mapAllType = new BMapType(TypeTags.MAP, symTable.anyOrErrorType, null);
            symTable.arrayAllType = new BArrayType(symTable.anyOrErrorType);
            symTable.typeDesc.constraint = symTable.anyOrErrorType;
            symTable.futureType.constraint = symTable.anyOrErrorType;
            symTable.pureType = BUnionType.create(null, symTable.anydataType, symTable.errorType);
            return;
        }
        throw new IllegalStateException("built-in 'lang.value:Cloneable' type not found");
    }

    // lang.value not loaded: point the internal Cloneable entry at the cached cloneable type.
    ScopeEntry entry = symTable.rootPkgSymbol.scope.lookup(Names.CLONEABLE_INTERNAL);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        entry.symbol.type = symTable.cloneableType;
        break;
    }
}
/**
 * Resolves the built-in integer range type from the createIntRange function's return type
 * in the internal lang module, caches it, and defines the int-range operations.
 *
 * @throws IllegalStateException if the createIntRange function cannot be found
 */
public void bootstrapIntRangeType() {
    for (ScopeEntry entry = symTable.langInternalModuleSymbol.scope.lookup(Names.CREATE_INT_RANGE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {
            symTable.intRangeType = (BObjectType) ((BInvokableType) entry.symbol.type).retType;
            symTable.defineIntRangeOperations();
            return;
        }
    }
    throw new IllegalStateException("built-in Integer Range type not found ?");
}
/**
 * Resolves the lang.object Iterable type and caches it on the symbol table.
 *
 * @throws IllegalStateException if the Iterable type cannot be found
 */
public void bootstrapIterableType() {
    for (ScopeEntry entry = symTable.langObjectModuleSymbol.scope.lookup(Names.OBJECT_ITERABLE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symTable.iterableType = (BObjectType) entry.symbol.type;
            return;
        }
    }
    throw new IllegalStateException("built-in distinct Iterable type not found ?");
}
/**
 * Resolves the lang.object RawTemplate type and caches it on the symbol table.
 *
 * @throws IllegalStateException if the RawTemplate type cannot be found
 */
public void loadRawTemplateType() {
    for (ScopeEntry entry = symTable.langObjectModuleSymbol.scope.lookup(Names.RAW_TEMPLATE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symTable.rawTemplateType = (BObjectType) entry.symbol.type;
            return;
        }
    }
    throw new IllegalStateException("'lang.object:RawTemplate' type not found");
}
/** Resolves a built-in value type node by delegating to the shared built-in visitor. */
public void visit(BLangValueType valueTypeNode) {
    visitBuiltInTypeNode(valueTypeNode, valueTypeNode.typeKind, this.env);
}
/** Resolves a built-in reference type node by delegating to the shared built-in visitor. */
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
    visitBuiltInTypeNode(builtInRefType, builtInRefType.typeKind, this.env);
}
/**
 * Resolves an array type node. The element type is resolved first; then one BArrayType is
 * built per declared dimension (innermost first, each wrapping the previous result).
 * A dimension's size may be an open/inferred indicator, an int literal, or a reference to
 * an int constant; invalid size expressions produce a semantic error.
 */
public void visit(BLangArrayType arrayTypeNode) {
    resultType = resolveTypeNode(arrayTypeNode.elemtype, env, diagCode);
    if (resultType == symTable.noType) {
        return;
    }
    boolean isError = false;
    for (int i = 0; i < arrayTypeNode.dimensions; i++) {
        BTypeSymbol arrayTypeSymbol = Symbols.createTypeSymbol(SymTag.ARRAY_TYPE, Flags.PUBLIC, Names.EMPTY,
                env.enclPkg.symbol.pkgID, null, env.scope.owner,
                arrayTypeNode.pos, SOURCE);
        BArrayType arrType;
        if (arrayTypeNode.sizes.length == 0) {
            // No sizes at all: every dimension is an open array.
            arrType = new BArrayType(resultType, arrayTypeSymbol);
        } else {
            BLangExpression size = arrayTypeNode.sizes[i];
            if (size.getKind() == NodeKind.LITERAL || size.getKind() == NodeKind.NUMERIC_LITERAL) {
                Integer sizeIndicator = (Integer) (((BLangLiteral) size).getValue());
                BArrayState arrayState;
                // Sentinel values mark open ([]) and inferred ([*]) dimensions.
                if (sizeIndicator == OPEN_ARRAY_INDICATOR) {
                    arrayState = BArrayState.OPEN;
                } else if (sizeIndicator == INFERRED_ARRAY_INDICATOR) {
                    arrayState = BArrayState.INFERRED;
                } else {
                    arrayState = BArrayState.CLOSED;
                }
                arrType = new BArrayType(resultType, arrayTypeSymbol, sizeIndicator, arrayState);
            } else {
                // Non-literal size: must be a simple reference to an int constant.
                if (size.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
                    dlog.error(size.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType,
                            ((BLangTypedescExpr) size).getTypeNode());
                    isError = true;
                    continue;
                }
                BLangSimpleVarRef sizeReference = (BLangSimpleVarRef) size;
                Name pkgAlias = names.fromIdNode(sizeReference.pkgAlias);
                Name typeName = names.fromIdNode(sizeReference.variableName);

                BSymbol sizeSymbol = lookupMainSpaceSymbolInPackage(size.pos, env, pkgAlias, typeName);
                sizeReference.symbol = sizeSymbol;

                if (symTable.notFoundSymbol == sizeSymbol) {
                    dlog.error(arrayTypeNode.pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, size);
                    isError = true;
                    continue;
                }

                if (sizeSymbol.tag != SymTag.CONSTANT) {
                    dlog.error(size.pos, DiagnosticErrorCode.INVALID_ARRAY_SIZE_REFERENCE, sizeSymbol);
                    isError = true;
                    continue;
                }

                BConstantSymbol sizeConstSymbol = (BConstantSymbol) sizeSymbol;
                BType lengthLiteralType = sizeConstSymbol.literalType;

                if (lengthLiteralType.tag != TypeTags.INT) {
                    dlog.error(size.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType,
                            sizeConstSymbol.literalType);
                    isError = true;
                    continue;
                }

                // NOTE(review): the length comes from parsing the constant's *type* string —
                // presumably the singleton finite type prints as the int value; verify.
                int length = Integer.parseInt(sizeConstSymbol.type.toString());
                arrType = new BArrayType(resultType, arrayTypeSymbol, length, BArrayState.CLOSED);
            }
        }
        // The current resultType becomes the element type of the next (outer) dimension.
        arrayTypeSymbol.type = arrType;
        resultType = arrayTypeSymbol.type;
        markParameterizedType(arrType, arrType.eType);
    }
    if (isError) {
        resultType = symTable.semanticError;
    }
}
/**
 * Resolves a union type node by resolving each member type and creating a union type
 * over them. Propagates noType if any member fails to resolve.
 */
public void visit(BLangUnionTypeNode unionTypeNode) {
    LinkedHashSet<BType> resolvedMembers = new LinkedHashSet<>();
    for (BLangType memberTypeNode : unionTypeNode.memberTypeNodes) {
        BType memberType = resolveTypeNode(memberTypeNode, env);
        if (memberType == symTable.noType) {
            resultType = symTable.noType;
            return;
        }
        resolvedMembers.add(memberType);
    }
    BTypeSymbol unionTypeSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
            Names.EMPTY, env.enclPkg.symbol.pkgID, null, env.scope.owner, unionTypeNode.pos, SOURCE);
    BUnionType unionType = BUnionType.create(unionTypeSymbol, resolvedMembers);
    unionTypeSymbol.type = unionType;
    markParameterizedType(unionType, resolvedMembers);
    resultType = unionType;
}
/** Resolves an intersection type node by delegating to computeIntersectionType. */
public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
    resultType = computeIntersectionType(intersectionTypeNode);
}
/**
 * Resolves an object type node into a BObjectType with a freshly created object symbol.
 * Anonymous object types are implicitly public; READONLY/ISOLATED/SERVICE declaration
 * flags are mirrored onto the type's flag mask.
 */
public void visit(BLangObjectTypeNode objectTypeNode) {
    EnumSet<Flag> objectFlags = EnumSet.copyOf(objectTypeNode.flagSet);
    if (objectTypeNode.isAnonymous) {
        objectFlags.add(Flag.PUBLIC);
    }

    int typeFlagMask = 0;
    if (objectFlags.contains(Flag.READONLY)) {
        typeFlagMask |= Flags.READONLY;
    }
    if (objectFlags.contains(Flag.ISOLATED)) {
        typeFlagMask |= Flags.ISOLATED;
    }
    if (objectFlags.contains(Flag.SERVICE)) {
        typeFlagMask |= Flags.SERVICE;
    }

    BTypeSymbol objectSymbol = Symbols.createObjectSymbol(Flags.asMask(objectFlags), Names.EMPTY,
            env.enclPkg.symbol.pkgID, null, env.scope.owner, objectTypeNode.pos, SOURCE);
    BObjectType objectType = new BObjectType(objectSymbol, typeFlagMask);
    objectSymbol.type = objectType;
    objectTypeNode.symbol = objectSymbol;
    resultType = objectType;
}
/**
 * Resolves a record type node. On first visit a record symbol is created; for records
 * defined outside the package level, an anonymous type name is generated and the symbol
 * and node are entered into the current scope. Subsequent visits reuse the cached symbol.
 */
public void visit(BLangRecordTypeNode recordTypeNode) {
    if (recordTypeNode.symbol == null) {
        EnumSet<Flag> flags = recordTypeNode.isAnonymous ? EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS)
                : EnumSet.noneOf(Flag.class);
        BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(flags), Names.EMPTY,
                env.enclPkg.symbol.pkgID, null,
                env.scope.owner, recordTypeNode.pos,
                recordTypeNode.isAnonymous ? VIRTUAL : SOURCE);
        BRecordType recordType = new BRecordType(recordSymbol);
        recordSymbol.type = recordType;
        recordTypeNode.symbol = recordSymbol;
        if (env.node.getKind() != NodeKind.PACKAGE) {
            // Locally defined record: give it a generated anonymous name and define it now.
            recordSymbol.name = names.fromString(
                    anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
            symbolEnter.defineSymbol(recordTypeNode.pos, recordTypeNode.symbol, env);
            symbolEnter.defineNode(recordTypeNode, env);
        }
        resultType = recordType;
    } else {
        resultType = recordTypeNode.symbol.type;
    }
}
/**
 * Resolves a stream type node into a BStreamType with the resolved constraint and
 * completion (error) types. When no error type is declared, nil is used.
 */
public void visit(BLangStreamType streamTypeNode) {
    BType type = resolveTypeNode(streamTypeNode.type, env);
    BType constraintType = resolveTypeNode(streamTypeNode.constraint, env);
    // Completion type defaults to nil when not declared.
    BType error = streamTypeNode.error != null ? resolveTypeNode(streamTypeNode.error, env) : symTable.nilType;
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }

    BType streamType = new BStreamType(TypeTags.STREAM, constraintType, error, null);
    BTypeSymbol typeSymbol = type.tsymbol;
    streamType.tsymbol = Symbols.createTypeSymbol(typeSymbol.tag, typeSymbol.flags, typeSymbol.name,
            typeSymbol.pkgID, streamType, typeSymbol.owner,
            streamTypeNode.pos, SOURCE);

    markParameterizedType(streamType, constraintType);
    // NOTE(review): `error` is assigned above and appears never to be null here — the
    // null check looks redundant; verify before removing.
    if (error != null) {
        markParameterizedType(streamType, error);
    }

    resultType = streamType;
}
/**
 * Resolves a table type node into a BTableType, including its key type constraint or key
 * specifier (field name list) when present. Tables with a map constraint may not declare
 * a key constraint unless defined inside a lang library module.
 */
public void visit(BLangTableTypeNode tableTypeNode) {
    BType type = resolveTypeNode(tableTypeNode.type, env);
    BType constraintType = resolveTypeNode(tableTypeNode.constraint, env);
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }

    BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
    BTypeSymbol typeSymbol = type.tsymbol;
    tableType.tsymbol = Symbols.createTypeSymbol(SymTag.TYPE, Flags.asMask(EnumSet.noneOf(Flag.class)),
            typeSymbol.name, env.enclPkg.symbol.pkgID, tableType,
            env.scope.owner, tableTypeNode.pos, SOURCE);
    tableType.tsymbol.flags = typeSymbol.flags;
    tableType.constraintPos = tableTypeNode.constraint.pos;
    tableType.isTypeInlineDefined = tableTypeNode.isTypeInlineDefined;

    if (tableTypeNode.tableKeyTypeConstraint != null) {
        // key<T> form: a key type constraint.
        tableType.keyTypeConstraint = resolveTypeNode(tableTypeNode.tableKeyTypeConstraint.keyType, env);
        tableType.keyPos = tableTypeNode.tableKeyTypeConstraint.pos;
    } else if (tableTypeNode.tableKeySpecifier != null) {
        // key(a, b) form: a list of key field names.
        BLangTableKeySpecifier tableKeySpecifier = tableTypeNode.tableKeySpecifier;
        List<String> fieldNameList = new ArrayList<>();
        for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) {
            fieldNameList.add(((BLangIdentifier) identifier).value);
        }
        tableType.fieldNameList = fieldNameList;
        tableType.keyPos = tableKeySpecifier.pos;
    }

    if (constraintType.tag == TypeTags.MAP &&
            (tableType.fieldNameList != null || tableType.keyTypeConstraint != null) &&
            !tableType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) {
        dlog.error(tableType.keyPos,
                DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
        resultType = symTable.semanticError;
        return;
    }

    markParameterizedType(tableType, constraintType);
    tableTypeNode.tableType = tableType;

    resultType = tableType;
}
/**
 * Resolves a finite type node by collecting its value-space literals into a BFiniteType.
 * Each literal's type is normalized to the canonical basic type for its tag.
 */
public void visit(BLangFiniteTypeNode finiteTypeNode) {
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE,
            Flags.asMask(EnumSet.noneOf(Flag.class)), Names.EMPTY,
            env.enclPkg.symbol.pkgID, null, env.scope.owner,
            finiteTypeNode.pos, SOURCE);
    BFiniteType finiteType = new BFiniteType(finiteTypeSymbol);
    for (BLangExpression valueExpr : finiteTypeNode.valueSpace) {
        BLangLiteral literal = (BLangLiteral) valueExpr;
        literal.setBType(symTable.getTypeFromTag(literal.getBType().tag));
        finiteType.addValue(literal);
    }
    finiteTypeSymbol.type = finiteType;
    resultType = finiteType;
}
/**
 * Resolves a tuple type node: resolves every member type (and the rest type, if declared)
 * and builds a BTupleType. Propagates noType if any component fails to resolve.
 */
public void visit(BLangTupleTypeNode tupleTypeNode) {
    List<BType> resolvedMemberTypes = new ArrayList<>();
    for (BLangType memberTypeNode : tupleTypeNode.memberTypeNodes) {
        BType memberType = resolveTypeNode(memberTypeNode, env);
        if (memberType == symTable.noType) {
            resultType = symTable.noType;
            return;
        }
        resolvedMemberTypes.add(memberType);
    }
    BTypeSymbol tupleTypeSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
            Names.EMPTY, env.enclPkg.symbol.pkgID, null, env.scope.owner, tupleTypeNode.pos, SOURCE);
    BTupleType tupleType = new BTupleType(tupleTypeSymbol, resolvedMemberTypes);
    tupleTypeSymbol.type = tupleType;
    if (tupleTypeNode.restParamType != null) {
        BType restType = resolveTypeNode(tupleTypeNode.restParamType, env);
        if (restType == symTable.noType) {
            resultType = symTable.noType;
            return;
        }
        tupleType.restType = restType;
        markParameterizedType(tupleType, tupleType.restType);
    }
    markParameterizedType(tupleType, resolvedMemberTypes);
    resultType = tupleType;
}
/**
 * Resolves an error type node. A plain {@code error} (default detail, not distinct, and
 * not inside the annotations module) resolves to the cached built-in error type;
 * otherwise a new error type with its own symbol is created. Anonymous error types
 * defined outside the package level are given a generated name and defined in scope.
 */
public void visit(BLangErrorType errorTypeNode) {
    BType detailType = Optional.ofNullable(errorTypeNode.detailType)
            .map(bLangType -> resolveTypeNode(bLangType, env)).orElse(symTable.detailType);

    if (errorTypeNode.isAnonymous) {
        errorTypeNode.flagSet.add(Flag.PUBLIC);
        errorTypeNode.flagSet.add(Flag.ANONYMOUS);
    }

    boolean distinctErrorDef = errorTypeNode.flagSet.contains(Flag.DISTINCT);
    if (detailType == symTable.detailType && !distinctErrorDef &&
            !this.env.enclPkg.packageID.equals(PackageID.ANNOTATIONS)) {
        // Plain `error`: reuse the shared built-in error type.
        resultType = symTable.errorType;
        return;
    }

    BErrorTypeSymbol errorTypeSymbol = Symbols
            .createErrorSymbol(Flags.asMask(errorTypeNode.flagSet), Names.EMPTY, env.enclPkg.symbol.pkgID,
                    null, env.scope.owner, errorTypeNode.pos, SOURCE);

    if (env.node.getKind() != NodeKind.PACKAGE) {
        // Locally defined error type: generate an anonymous name and define it now.
        errorTypeSymbol.name = names.fromString(
                anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
        symbolEnter.defineSymbol(errorTypeNode.pos, errorTypeSymbol, env);
    }

    BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
    errorType.flags |= errorTypeSymbol.flags;
    errorTypeSymbol.type = errorType;

    markParameterizedType(errorType, detailType);

    errorType.typeIdSet = BTypeIdSet.emptySet();

    resultType = errorType;
}
/**
 * Resolves a constrained type node ({@code future<T>}, {@code map<T>}, {@code typedesc<T>},
 * {@code xml<T>}) by resolving the base type and the constraint, then constructing the
 * corresponding constrained BType. XML constraints are validated to be XML subtypes.
 */
public void visit(BLangConstrainedType constrainedTypeNode) {
    BType type = resolveTypeNode(constrainedTypeNode.type, env);
    BType constraintType = resolveTypeNode(constrainedTypeNode.constraint, env);
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }

    BType constrainedType = null;
    if (type.tag == TypeTags.FUTURE) {
        constrainedType = new BFutureType(TypeTags.FUTURE, constraintType, null);
    } else if (type.tag == TypeTags.MAP) {
        constrainedType = new BMapType(TypeTags.MAP, constraintType, null);
    } else if (type.tag == TypeTags.TYPEDESC) {
        constrainedType = new BTypedescType(constraintType, null);
    } else if (type.tag == TypeTags.XML) {
        if (constraintType.tag == TypeTags.PARAMETERIZED_TYPE) {
            // Dependently-typed constraint: validate the underlying typedesc constraint.
            BType typedescType = ((BParameterizedType) constraintType).paramSymbol.type;
            BType typedescConstraint = ((BTypedescType) typedescType).constraint;
            validateXMLConstraintType(typedescConstraint, constrainedTypeNode.pos);
        } else {
            validateXMLConstraintType(constraintType, constrainedTypeNode.pos);
        }
        constrainedType = new BXMLType(constraintType, null);
    } else {
        // NOTE(review): unsupported base type — returns leaving resultType as set by the
        // constraint resolution above; confirm this is the intended fallback.
        return;
    }

    BTypeSymbol typeSymbol = type.tsymbol;
    constrainedType.tsymbol = Symbols.createTypeSymbol(typeSymbol.tag, typeSymbol.flags, typeSymbol.name,
            typeSymbol.pkgID, constrainedType, typeSymbol.owner,
            constrainedTypeNode.pos, SOURCE);
    markParameterizedType(constrainedType, constraintType);
    resultType = constrainedType;
}
/**
 * Validates that an {@code xml<T>} constraint is an XML subtype (or never). Union
 * constraints are validated member-by-member; any other non-XML type is reported as an
 * incompatible constraint.
 */
private void validateXMLConstraintType(BType constraintType, Location pos) {
    int constraintTag = constraintType.tag;
    if (constraintTag == TypeTags.UNION) {
        checkUnionTypeForXMLSubTypes((BUnionType) constraintType, pos);
    } else if (!TypeTags.isXMLTypeTag(constraintTag) && constraintTag != TypeTags.NEVER) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CONSTRAINT, symTable.xmlType, constraintType);
    }
}
/**
 * Recursively validates that every member of an {@code xml<T>} union constraint is an XML
 * subtype, reporting an incompatible-constraint error for each member that is not.
 *
 * Fix: a nested union member previously fell through to the {@code isXMLTypeTag} check,
 * where the UNION tag is never an XML tag, so a spurious error was logged for the nested
 * union itself even when all of its members were valid XML subtypes. The recursion now
 * fully handles nested unions and skips the flat check for them.
 *
 * @param constraintUnionType union constraint to validate
 * @param pos                 position to report errors at
 */
private void checkUnionTypeForXMLSubTypes(BUnionType constraintUnionType, Location pos) {
    for (BType memberType : constraintUnionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.UNION) {
            // The nested union's members are validated recursively; the union wrapper
            // itself carries the UNION tag and must not be flagged by the check below.
            checkUnionTypeForXMLSubTypes((BUnionType) memberType, pos);
            continue;
        }
        if (!TypeTags.isXMLTypeTag(memberType.tag)) {
            dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CONSTRAINT, symTable.xmlType,
                    constraintUnionType);
        }
    }
}
/**
 * Resolves a user-defined type reference. Lookup order: annotation space (when the owner
 * is an annotation), then the main space. A main-space hit that is a variable of typedesc
 * type inside a function is treated as a dependently-typed (parameterized) return type,
 * which is only allowed for external functions. Finally, the root scope is consulted.
 */
public void visit(BLangUserDefinedType userDefinedTypeNode) {
    // 1) Lookup the typename in the package scope using the visible alias.
    Name pkgAlias = names.fromIdNode(userDefinedTypeNode.pkgAlias);
    Name typeName = names.fromIdNode(userDefinedTypeNode.typeName);
    BSymbol symbol = symTable.notFoundSymbol;

    // Annotation owners resolve against the annotation symbol space first.
    if (env.scope.owner.tag == SymTag.ANNOTATION) {
        symbol = lookupAnnotationSpaceSymbolInPackage(userDefinedTypeNode.pos, env, pkgAlias, typeName);
    }

    if (symbol == symTable.notFoundSymbol) {
        BSymbol tempSymbol = lookupMainSpaceSymbolInPackage(userDefinedTypeNode.pos, env, pkgAlias, typeName);

        if ((tempSymbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symbol = tempSymbol;
        } else if (Symbols.isTagOn(tempSymbol, SymTag.VARIABLE) && env.node.getKind() == NodeKind.FUNCTION) {
            // A variable reference used as a type inside a function: candidate for a
            // dependently-typed (typedesc-parameterized) return type.
            BLangFunction func = (BLangFunction) env.node;
            boolean errored = false;

            // Only external functions with a declared return type may be dependently typed.
            if (func.returnTypeNode == null ||
                    (func.hasBody() && func.body.getKind() != NodeKind.EXTERN_FUNCTION_BODY)) {
                dlog.error(userDefinedTypeNode.pos,
                        DiagnosticErrorCode.INVALID_NON_EXTERNAL_DEPENDENTLY_TYPED_FUNCTION);
                errored = true;
            }

            if (tempSymbol.type != null && tempSymbol.type.tag != TypeTags.TYPEDESC) {
                dlog.error(userDefinedTypeNode.pos, DiagnosticErrorCode.INVALID_PARAM_TYPE_FOR_RETURN_TYPE,
                        tempSymbol.type);
                errored = true;
            }

            if (errored) {
                this.resultType = symTable.semanticError;
                return;
            }

            ParameterizedTypeInfo parameterizedTypeInfo =
                    getTypedescParamValueType(func.requiredParams, tempSymbol);
            BType paramValType = parameterizedTypeInfo == null ? null : parameterizedTypeInfo.paramValueType;

            if (paramValType == symTable.semanticError) {
                this.resultType = symTable.semanticError;
                return;
            }

            if (paramValType != null) {
                // Build a parameterized type bound to the typedesc parameter.
                BTypeSymbol tSymbol = new BTypeSymbol(SymTag.TYPE, Flags.PARAMETERIZED | tempSymbol.flags,
                        tempSymbol.name, tempSymbol.pkgID, null, func.symbol,
                        tempSymbol.pos, VIRTUAL);
                tSymbol.type = new BParameterizedType(paramValType, (BVarSymbol) tempSymbol,
                        tSymbol, tempSymbol.name, parameterizedTypeInfo.index);
                tSymbol.type.flags |= Flags.PARAMETERIZED;

                this.resultType = tSymbol.type;
                userDefinedTypeNode.symbol = tSymbol;
                return;
            }
        }
    }

    if (symbol == symTable.notFoundSymbol) {
        // 2) Fall back to the root scope (built-in names).
        symbol = lookupMemberSymbol(userDefinedTypeNode.pos, symTable.rootScope, this.env, typeName,
                SymTag.VARIABLE_NAME);
    }

    if (this.env.logErrors && symbol == symTable.notFoundSymbol) {
        // Suppress the error for missing/unknown nodes already reported elsewhere.
        if (!missingNodesHelper.isMissingNode(pkgAlias) && !missingNodesHelper.isMissingNode(typeName) &&
                !symbolEnter.isUnknownTypeRef(userDefinedTypeNode)) {
            dlog.error(userDefinedTypeNode.pos, diagCode, typeName);
        }
        resultType = symTable.semanticError;
        return;
    }

    userDefinedTypeNode.symbol = symbol;
    resultType = symbol.type;
}
/**
 * Finds the value type to use for a dependently-typed return type that refers to a
 * typedesc-typed parameter, based on the parameter's default value expression.
 *
 * @param params required parameters of the enclosing function
 * @param varSym symbol of the referenced typedesc parameter
 * @return the inferred parameterized-type info, info holding {@code semanticError} on an
 *         invalid default, or {@code null} if no matching parameter is found
 */
private ParameterizedTypeInfo getTypedescParamValueType(List<BLangSimpleVariable> params, BSymbol varSym) {
    int position = 0;
    for (BLangSimpleVariable param : params) {
        if (!param.name.value.equals(varSym.name.value)) {
            position++;
            continue;
        }
        BLangExpression defaultExpr = param.expr;
        // No default or an inferred (`<>`) default: fall back to the typedesc constraint.
        if (defaultExpr == null || defaultExpr.getKind() == NodeKind.INFER_TYPEDESC_EXPR) {
            return new ParameterizedTypeInfo(((BTypedescType) varSym.type).constraint, position);
        }
        switch (defaultExpr.getKind()) {
            case TYPEDESC_EXPRESSION:
                return new ParameterizedTypeInfo(
                        resolveTypeNode(((BLangTypedescExpr) defaultExpr).typeNode, this.env), position);
            case SIMPLE_VARIABLE_REF:
                Name refName = names.fromIdNode(((BLangSimpleVarRef) defaultExpr).variableName);
                BSymbol refSym = lookupSymbolInMainSpace(this.env, refName);
                if (refSym == symTable.notFoundSymbol) {
                    return new ParameterizedTypeInfo(symTable.semanticError);
                }
                return new ParameterizedTypeInfo(refSym.type, position);
            default:
                // Any other default expression cannot act as a typedesc default.
                dlog.error(param.pos, DiagnosticErrorCode.INVALID_TYPEDESC_PARAM);
                return new ParameterizedTypeInfo(symTable.semanticError);
        }
    }
    return null;
}
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    // Resolve the full invokable type first; this also resolves (and records) the
    // return type node's BType, which the typedesc-param validation reads below.
    List<BLangVariable> params = functionTypeNode.getParams();
    Location pos = functionTypeNode.pos;
    BLangType returnTypeNode = functionTypeNode.returnTypeNode;
    BType invokableType = createInvokableType(params, functionTypeNode.restParam, returnTypeNode,
            Flags.asMask(functionTypeNode.flagSet), env, pos);
    // Read the resolved return type only AFTER createInvokableType has run.
    BType declaredRetType = returnTypeNode == null ? null : returnTypeNode.getBType();
    if (validateInferTypedescParams(pos, params, declaredRetType)) {
        resultType = invokableType;
    } else {
        resultType = symTable.semanticError;
    }
}
/**
 * Creates a {@code BInvokableType} and its associated {@code BInvokableTypeSymbol} for the
 * given parameter, rest-parameter, and return type nodes.
 *
 * @param paramVars    parameter variable nodes of the function type
 * @param restVariable rest parameter variable node, or {@code null} if absent
 * @param retTypeVar   return type node
 * @param flags        mask of flags for the function type
 * @param env          symbol environment used for type resolution
 * @param location     position used for the created type symbol
 * @return the created invokable type, or {@code symTable.noType} if any constituent
 *         type fails to resolve
 */
public BType createInvokableType(List<? extends BLangVariable> paramVars,
                                 BLangVariable restVariable,
                                 BLangType retTypeVar,
                                 long flags,
                                 SymbolEnv env,
                                 Location location) {
    List<BType> paramTypes = new ArrayList<>();
    List<BVarSymbol> params = new ArrayList<>();
    boolean foundDefaultableParam = false;
    List<String> paramNames = new ArrayList<>();
    // `function` (any-function) type: carries no parameter/return information at all.
    if (Symbols.isFlagOn(flags, Flags.ANY_FUNCTION)) {
        BInvokableType bInvokableType = new BInvokableType(null, null, null, null);
        bInvokableType.flags = flags;
        BInvokableTypeSymbol tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, flags,
                env.enclPkg.symbol.pkgID, bInvokableType,
                env.scope.owner, location, SOURCE);
        tsymbol.params = null;
        tsymbol.restParam = null;
        tsymbol.returnType = null;
        bInvokableType.tsymbol = tsymbol;
        return bInvokableType;
    }
    for (BLangVariable paramNode : paramVars) {
        BLangSimpleVariable param = (BLangSimpleVariable) paramNode;
        Name paramName = names.fromIdNode(param.name);
        // Report duplicate parameter names; unnamed parameters are exempt.
        if (paramName != Names.EMPTY) {
            if (paramNames.contains(paramName.value)) {
                dlog.error(param.name.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, paramName.value);
            } else {
                paramNames.add(paramName.value);
            }
        }
        BType type = resolveTypeNode(param.getTypeNode(), env);
        if (type == symTable.noType) {
            return symTable.noType;
        }
        paramNode.setBType(type);
        paramTypes.add(type);
        long paramFlags = Flags.asMask(paramNode.flagSet);
        BVarSymbol symbol = new BVarSymbol(paramFlags, paramName, env.enclPkg.symbol.pkgID, type, env.scope.owner,
                param.pos, SOURCE);
        param.symbol = symbol;
        // A parameter with a default value is optional; a required parameter may not
        // appear after a defaultable one.
        if (param.expr != null) {
            foundDefaultableParam = true;
            symbol.isDefaultable = true;
            symbol.flags |= Flags.OPTIONAL;
        } else if (foundDefaultableParam) {
            dlog.error(param.pos, DiagnosticErrorCode.REQUIRED_PARAM_DEFINED_AFTER_DEFAULTABLE_PARAM);
        }
        params.add(symbol);
    }
    BType retType = resolveTypeNode(retTypeVar, env);
    if (retType == symTable.noType) {
        return symTable.noType;
    }
    BVarSymbol restParam = null;
    BType restType = null;
    if (restVariable != null) {
        restType = resolveTypeNode(restVariable.typeNode, env);
        if (restType == symTable.noType) {
            return symTable.noType;
        }
        restVariable.setBType(restType);
        restParam = new BVarSymbol(Flags.asMask(restVariable.flagSet),
                names.fromIdNode(((BLangSimpleVariable) restVariable).name),
                env.enclPkg.symbol.pkgID, restType, env.scope.owner, restVariable.pos, SOURCE);
    }
    BInvokableType bInvokableType = new BInvokableType(paramTypes, restType, retType, null);
    bInvokableType.flags = flags;
    BInvokableTypeSymbol tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, flags,
            env.enclPkg.symbol.pkgID, bInvokableType,
            env.scope.owner, location, SOURCE);
    tsymbol.params = params;
    tsymbol.restParam = restParam;
    tsymbol.returnType = retType;
    bInvokableType.tsymbol = tsymbol;
    // Propagate the PARAMETERIZED flag if any constituent (param, rest, or return)
    // type is parameterized.
    List<BType> allConstituentTypes = new ArrayList<>(paramTypes);
    allConstituentTypes.add(restType);
    allConstituentTypes.add(retType);
    markParameterizedType(bInvokableType, allConstituentTypes);
    return bInvokableType;
}
/**
 * Collects every in-scope symbol visible from the given environment, walking outward
 * through enclosing environments. Entries from inner scopes shadow outer ones; outer
 * module-level variables already shadowed by name are not merged in.
 *
 * @param env symbol environment to start from
 * @return map of symbol name to the scope entries visible under that name
 */
public Map<Name, List<ScopeEntry>> getAllVisibleInScopeSymbols(SymbolEnv env) {
    Map<Name, List<ScopeEntry>> visibleEntries = new HashMap<>();
    for (Map.Entry<Name, ScopeEntry> scoped : env.scope.entries.entrySet()) {
        List<ScopeEntry> entryList = new ArrayList<>();
        entryList.add(scoped.getValue());
        visibleEntries.put(scoped.getKey(), entryList);
    }
    if (env.enclEnv == null) {
        return visibleEntries;
    }
    for (Map.Entry<Name, List<ScopeEntry>> outer : getAllVisibleInScopeSymbols(env.enclEnv).entrySet()) {
        List<ScopeEntry> existing = visibleEntries.get(outer.getKey());
        if (existing == null) {
            visibleEntries.put(outer.getKey(), outer.getValue());
            continue;
        }
        // Same name exists in an inner scope: merge only entries that are not already
        // present and are not module-level variables (those are shadowed).
        for (ScopeEntry candidate : outer.getValue()) {
            if (!existing.contains(candidate) && !isModuleLevelVar(candidate.symbol)) {
                existing.add(candidate);
            }
        }
    }
    return visibleEntries;
}
/**
 * Builds the operator symbol for an equality expression ({@code ==}, {@code !=},
 * {@code ===}, {@code !==}) when a valid equality intersection exists between the
 * operand types; otherwise returns {@code symTable.notFoundSymbol}.
 */
public BSymbol getBinaryEqualityForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType,
                                            BLangBinaryExpr binaryExpr) {
    boolean intersectionExists;
    switch (opKind) {
        case EQUAL:
        case NOT_EQUAL:
            intersectionExists = types.validEqualityIntersectionExists(lhsType, rhsType);
            break;
        case REF_EQUAL:
        case REF_NOT_EQUAL:
            intersectionExists = types.isAssignable(lhsType, rhsType) || types.isAssignable(rhsType, lhsType);
            break;
        default:
            return symTable.notFoundSymbol;
    }
    if (!intersectionExists) {
        return symTable.notFoundSymbol;
    }
    boolean lhsIsValue = types.isValueType(lhsType);
    boolean rhsIsValue = types.isValueType(rhsType);
    if (lhsIsValue == rhsIsValue) {
        // Both operands are value types, or neither is: compare with declared types.
        return createEqualityOperator(opKind, lhsType, rhsType);
    }
    // Mixed value/non-value operands: lift both sides to `any` via implicit casts,
    // and downgrade reference equality to deep equality over `any`.
    types.setImplicitCastExpr(binaryExpr.rhsExpr, rhsType, symTable.anyType);
    types.setImplicitCastExpr(binaryExpr.lhsExpr, lhsType, symTable.anyType);
    OperatorKind effectiveKind;
    if (opKind == OperatorKind.REF_EQUAL) {
        effectiveKind = OperatorKind.EQUAL;
    } else if (opKind == OperatorKind.REF_NOT_EQUAL) {
        effectiveKind = OperatorKind.NOT_EQUAL;
    } else {
        effectiveKind = opKind;
    }
    return createEqualityOperator(effectiveKind, symTable.anyType, symTable.anyType);
}
/**
 * Builds the operator symbol for a bitwise shift expression when both operands have
 * valid integer types; otherwise returns {@code symTable.notFoundSymbol}.
 */
public BSymbol getBitwiseShiftOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    switch (opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            break;
        default:
            return symTable.notFoundSymbol;
    }
    if (!types.validIntegerTypeExists(lhsType) || !types.validIntegerTypeExists(rhsType)) {
        return symTable.notFoundSymbol;
    }
    if (opKind == OperatorKind.BITWISE_LEFT_SHIFT) {
        // Left shift always widens to int.
        return createBinaryOperator(opKind, lhsType, rhsType, symTable.intType);
    }
    // Right shifts preserve an unsigned/byte LHS subtype; anything else widens to int.
    switch (lhsType.tag) {
        case TypeTags.UNSIGNED32_INT:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED8_INT:
        case TypeTags.BYTE:
            return createBinaryOperator(opKind, lhsType, rhsType, lhsType);
        default:
            return createBinaryOperator(opKind, lhsType, rhsType, symTable.intType);
    }
}
/**
 * Builds the operator symbol for an arithmetic expression when the operand types admit
 * the operation (numeric for all; string/xml additionally for {@code +}); otherwise
 * returns {@code symTable.notFoundSymbol}.
 */
public BSymbol getArithmeticOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    boolean validOperands;
    switch (opKind) {
        case ADD:
            // `+` also covers string/xml concatenation.
            validOperands = (types.validNumericTypeExists(lhsType) && types.validNumericTypeExists(rhsType))
                    || (types.validStringOrXmlTypeExists(lhsType) && types.validStringOrXmlTypeExists(rhsType));
            break;
        case SUB:
        case DIV:
        case MUL:
        case MOD:
            validOperands = types.validNumericTypeExists(lhsType) && types.validNumericTypeExists(rhsType);
            break;
        default:
            return symTable.notFoundSymbol;
    }
    if (!validOperands) {
        return symTable.notFoundSymbol;
    }
    BType lhsCompatible = types.findCompatibleType(lhsType);
    BType rhsCompatible = types.findCompatibleType(rhsType);
    // Basic numeric operands must agree on the compatible type.
    if (types.isBasicNumericType(lhsCompatible) && lhsCompatible != rhsCompatible) {
        return symTable.notFoundSymbol;
    }
    // Result type is the "wider" of the two compatible types by tag ordering.
    BType retType = lhsCompatible.tag < rhsCompatible.tag ? rhsCompatible : lhsCompatible;
    return createBinaryOperator(opKind, lhsType, rhsType, retType);
}
/**
 * Define binary comparison operator for valid ordered types.
 *
 * @param opKind Binary operator kind
 * @param lhsType Type of the left hand side value
 * @param rhsType Type of the right hand side value
 * @return <, <=, >, or >= symbol
 */
public BSymbol getBinaryComparisonOpForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    switch (opKind) {
        case LESS_THAN:
        case LESS_EQUAL:
        case GREATER_THAN:
        case GREATER_EQUAL:
            break;
        default:
            return symTable.notFoundSymbol;
    }
    boolean orderedOperands = types.isOrderedType(lhsType, false)
            && types.isOrderedType(rhsType, false)
            && types.isSameOrderedType(lhsType, rhsType);
    if (!orderedOperands) {
        return symTable.notFoundSymbol;
    }
    // All four comparison kinds build their symbol from the same operator kind.
    return createBinaryComparisonOperator(opKind, lhsType, rhsType);
}
/** Returns {@code true} if the operator kind is one of the three bitwise shift operators. */
public boolean isBinaryShiftOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/** Returns {@code true} if the operator kind is an arithmetic operator (+, -, /, *, %). */
public boolean isArithmeticOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case ADD:
        case SUB:
        case DIV:
        case MUL:
        case MOD:
            return true;
        default:
            return false;
    }
}
/** Returns {@code true} if the operator kind is an ordered comparison (<, <=, >, >=). */
public boolean isBinaryComparisonOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case LESS_THAN:
        case LESS_EQUAL:
        case GREATER_THAN:
        case GREATER_EQUAL:
            return true;
        default:
            return false;
    }
}
/**
 * Marks {@code type} (and its type symbol) as PARAMETERIZED if the given constituent
 * type carries the PARAMETERIZED flag.
 *
 * @return {@code true} if the flag was propagated, {@code false} otherwise
 */
public boolean markParameterizedType(BType type, BType constituentType) {
    if (!Symbols.isFlagOn(constituentType.flags, Flags.PARAMETERIZED)) {
        return false;
    }
    type.tsymbol.flags |= Flags.PARAMETERIZED;
    type.flags |= Flags.PARAMETERIZED;
    return true;
}
/**
 * Marks the enclosing type as PARAMETERIZED if any non-null constituent type carries
 * the flag. No-op when the enclosing type is already marked.
 */
public void markParameterizedType(BType enclosingType, Collection<BType> constituentTypes) {
    if (Symbols.isFlagOn(enclosingType.flags, Flags.PARAMETERIZED)) {
        return;
    }
    for (BType constituent : constituentTypes) {
        // Stop at the first constituent that propagates the flag.
        if (constituent != null && markParameterizedType(enclosingType, constituent)) {
            return;
        }
    }
}
/**
 * Resolves an operator symbol from a scope-entry chain by matching the candidate
 * operand type tags against each operator overload's parameter type tags.
 *
 * @param entry head of the scope-entry chain of operator overloads
 * @param types operand types of the expression being resolved
 * @return the matching operator symbol, or {@code symTable.notFoundSymbol}
 */
private BSymbol resolveOperator(ScopeEntry entry, List<BType> types) {
    while (entry != NOT_FOUND_ENTRY) {
        BInvokableType opType = (BInvokableType) entry.symbol.type;
        if (types.size() == opType.paramTypes.size()) {
            boolean match = true;
            for (int i = 0; i < types.size(); i++) {
                if (types.get(i).tag != opType.paramTypes.get(i).tag) {
                    match = false;
                    break; // first mismatch decides; no need to compare the rest
                }
            }
            if (match) {
                return entry.symbol;
            }
        }
        entry = entry.next;
    }
    return symTable.notFoundSymbol;
}
/**
 * Resolves a built-in type node by looking up its type symbol in the root scope, logging
 * an error with the current {@code diagCode} if it cannot be found, and recording the
 * resolved type on both the node and {@code resultType}.
 */
private void visitBuiltInTypeNode(BLangType typeNode, TypeKind typeKind, SymbolEnv env) {
    Name typeName = names.fromTypeKind(typeKind);
    BSymbol typeSymbol = lookupMemberSymbol(typeNode.pos, symTable.rootScope, env, typeName, SymTag.TYPE);
    if (typeSymbol == symTable.notFoundSymbol) {
        dlog.error(typeNode.pos, diagCode, typeName);
    }
    // On failure this records the not-found symbol's type, matching lookup semantics.
    BType resolvedType = typeSymbol.type;
    typeNode.setBType(resolvedType);
    resultType = resolvedType;
}
/**
 * Collects all XML namespace symbols visible from the given environment into the map,
 * walking outward through enclosing environments. Inner declarations shadow outer ones
 * under the same name.
 */
private void addNamespacesInScope(Map<Name, BXMLNSSymbol> namespaces, SymbolEnv env) {
    // Iterative walk from the innermost environment outward.
    for (SymbolEnv current = env; current != null; current = current.enclEnv) {
        for (Map.Entry<Name, ScopeEntry> scoped : current.scope.entries.entrySet()) {
            BSymbol symbol = scoped.getValue().symbol;
            if (symbol.kind == SymbolKind.XMLNS) {
                // putIfAbsent keeps the innermost (first seen) declaration.
                namespaces.putIfAbsent(scoped.getKey(), (BXMLNSSymbol) symbol);
            }
        }
    }
}
/**
 * Checks whether the given member symbol may be accessed from the given environment:
 * public members always; non-private members within the defining module; private
 * members only from the owning type (or a method whose receiver is the owning type).
 */
private boolean isMemberAccessAllowed(SymbolEnv env, BSymbol symbol) {
    if (Symbols.isPublic(symbol)) {
        return true;
    }
    if (!Symbols.isPrivate(symbol)) {
        // Module-visible member: same module required.
        return env.enclPkg.symbol.pkgID == symbol.pkgID;
    }
    if (env.enclType == null) {
        return isMemberAllowed(env, symbol);
    }
    // Private member referenced from within a type body: owner must match.
    return env.enclType.getBType().tsymbol == symbol.owner;
}
/**
 * Walks the environment chain outward looking for an enclosing invokable whose receiver
 * type owns the given symbol (i.e. a method of the symbol's owning type).
 */
private boolean isMemberAllowed(SymbolEnv env, BSymbol symbol) {
    for (SymbolEnv current = env; current != null; current = current.enclEnv) {
        if (current.enclInvokable != null
                && current.enclInvokable.symbol.receiverSymbol != null
                && current.enclInvokable.symbol.receiverSymbol.type.tsymbol == symbol.owner) {
            return true;
        }
    }
    return false;
}
/**
 * Resolves the effective type of an intersection type node ({@code A & B & ...}).
 * Only `readonly` intersections and error intersections are supported; anything else is
 * a {@code semanticError}. The constituents are folded pairwise left-to-right via
 * {@link #getPotentialIntersection}.
 *
 * @param intersectionTypeNode the intersection type node being resolved
 * @return the effective intersection type, {@code noType} if any constituent is still
 *         unresolved, or {@code semanticError} on an invalid intersection
 */
private BType computeIntersectionType(BLangIntersectionTypeNode intersectionTypeNode) {
    List<BLangType> constituentTypeNodes = intersectionTypeNode.constituentTypeNodes;
    Map<BType, BLangType> typeBLangTypeMap = new HashMap<>();
    boolean validIntersection = true;
    boolean isErrorIntersection = false;
    // Tracks whether the folded result is one of the constituents themselves (no new
    // type needs to be defined in that case).
    boolean isAlreadyExistingType = false;
    BLangType bLangTypeOne = constituentTypeNodes.get(0);
    BType typeOne = resolveTypeNode(bLangTypeOne, env);
    if (typeOne == symTable.noType) {
        return symTable.noType;
    }
    typeBLangTypeMap.put(typeOne, bLangTypeOne);
    BLangType bLangTypeTwo = constituentTypeNodes.get(1);
    BType typeTwo = resolveTypeNode(bLangTypeTwo, env);
    if (typeTwo == symTable.noType) {
        return symTable.noType;
    }
    typeBLangTypeMap.put(typeTwo, bLangTypeTwo);
    boolean hasReadOnlyType = typeOne == symTable.readonlyType || typeTwo == symTable.readonlyType;
    if (typeOne.tag == TypeTags.ERROR || typeTwo.tag == TypeTags.ERROR) {
        isErrorIntersection = true;
    }
    // Only `readonly & T` and error intersections are supported forms.
    if (!(hasReadOnlyType || isErrorIntersection)) {
        dlog.error(intersectionTypeNode.pos,
                DiagnosticErrorCode.UNSUPPORTED_TYPE_INTERSECTION, intersectionTypeNode);
        return symTable.semanticError;
    }
    BType potentialIntersectionType = getPotentialIntersection(
            Types.IntersectionContext.from(dlog, bLangTypeOne.pos, bLangTypeTwo.pos),
            typeOne, typeTwo, this.env);
    if (typeOne == potentialIntersectionType || typeTwo == potentialIntersectionType) {
        isAlreadyExistingType = true;
    }
    LinkedHashSet<BType> constituentBTypes = new LinkedHashSet<>();
    constituentBTypes.add(typeOne);
    constituentBTypes.add(typeTwo);
    if (potentialIntersectionType == symTable.semanticError) {
        validIntersection = false;
    } else {
        // Fold the remaining constituents (index 2 onwards) into the running result.
        for (int i = 2; i < constituentTypeNodes.size(); i++) {
            BLangType bLangType = constituentTypeNodes.get(i);
            BType type = resolveTypeNode(bLangType, env);
            if (type.tag == TypeTags.ERROR) {
                isErrorIntersection = true;
            }
            typeBLangTypeMap.put(type, bLangType);
            if (!hasReadOnlyType) {
                hasReadOnlyType = type == symTable.readonlyType;
            }
            if (type == symTable.noType) {
                return symTable.noType;
            }
            BType tempIntersectionType = getPotentialIntersection(
                    Types.IntersectionContext.from(dlog, bLangTypeOne.pos, bLangTypeTwo.pos),
                    potentialIntersectionType, type, this.env);
            if (tempIntersectionType == symTable.semanticError) {
                validIntersection = false;
                break;
            }
            if (type == tempIntersectionType) {
                potentialIntersectionType = type;
                isAlreadyExistingType = true;
            } else if (potentialIntersectionType != tempIntersectionType) {
                potentialIntersectionType = tempIntersectionType;
                isAlreadyExistingType = false;
            }
            constituentBTypes.add(type);
        }
    }
    if (!validIntersection) {
        dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_INTERSECTION_TYPE, intersectionTypeNode);
        return symTable.semanticError;
    }
    // Error intersections that produced a new type need a defined intersection type,
    // reusing an already-defined detail record type where one exists.
    if (isErrorIntersection && !isAlreadyExistingType) {
        BType detailType = ((BErrorType) potentialIntersectionType).detailType;
        boolean existingErrorDetailType = false;
        if (detailType.tsymbol != null) {
            BSymbol detailTypeSymbol = lookupSymbolInMainSpace(env, detailType.tsymbol.name);
            if (detailTypeSymbol != symTable.notFoundSymbol) {
                existingErrorDetailType = true;
            }
        }
        return defineIntersectionType((BErrorType) potentialIntersectionType, intersectionTypeNode.pos,
                constituentBTypes, existingErrorDetailType, env);
    }
    // Inherently immutable, or already readonly and not an object: use as-is.
    if (types.isInherentlyImmutableType(potentialIntersectionType) ||
            (Symbols.isFlagOn(potentialIntersectionType.flags, Flags.READONLY) &&
                    !types.isSubTypeOfBaseType(potentialIntersectionType, TypeTags.OBJECT))) {
        return potentialIntersectionType;
    }
    if (!types.isSelectivelyImmutableType(potentialIntersectionType, false)) {
        if (types.isSelectivelyImmutableType(potentialIntersectionType)) {
            // Selectively immutable only when objects are allowed: invalid for readonly objects.
            dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_READONLY_OBJECT_INTERSECTION_TYPE);
        } else {
            dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_READONLY_INTERSECTION_TYPE,
                    potentialIntersectionType);
        }
        return symTable.semanticError;
    }
    // Recover the flag set from the original type node of the folded result, if any,
    // so the immutable clone keeps modifiers such as `isolated`/`client`.
    BLangType typeNode = typeBLangTypeMap.get(potentialIntersectionType);
    Set<Flag> flagSet;
    if (typeNode == null) {
        flagSet = new HashSet<>();
    } else if (typeNode.getKind() == NodeKind.OBJECT_TYPE) {
        flagSet = ((BLangObjectTypeNode) typeNode).flagSet;
    } else if (typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
        flagSet = ((BLangUserDefinedType) typeNode).flagSet;
    } else {
        flagSet = new HashSet<>();
    }
    return ImmutableTypeCloner.getImmutableIntersectionType(intersectionTypeNode.pos, types,
            (SelectivelyImmutableReferenceType)
                    potentialIntersectionType,
            env, symTable, anonymousModelHelper, names, flagSet);
}
/**
 * Defines the intersection type for an error intersection, first defining the error's
 * detail record type in the package environment unless it is already defined.
 */
private BIntersectionType defineIntersectionType(BErrorType intersectionErrorType,
                                                 Location pos,
                                                 LinkedHashSet<BType> constituentBTypes,
                                                 boolean isAlreadyDefinedDetailType, SymbolEnv env) {
    SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);
    if (intersectionErrorType.detailType.tag == TypeTags.RECORD && !isAlreadyDefinedDetailType) {
        // A freshly-created record detail type needs its own type definition.
        defineErrorDetailRecord((BRecordType) intersectionErrorType.detailType, pos, pkgEnv);
    }
    return defineErrorIntersectionType(intersectionErrorType, constituentBTypes,
            intersectionErrorType.tsymbol.pkgID, intersectionErrorType.tsymbol.owner);
}
/**
 * Defines a type definition (and init function) for an error detail record type,
 * registering each field symbol in the record symbol's scope.
 */
private BLangTypeDefinition defineErrorDetailRecord(BRecordType detailRecord, Location pos, SymbolEnv env) {
    BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) detailRecord.tsymbol;
    for (BField field : detailRecord.fields.values()) {
        recordSymbol.scope.define(field.symbol.name, field.symbol);
    }
    BLangRecordTypeNode recordTypeNode =
            TypeDefBuilderHelper.createRecordTypeNode(new ArrayList<>(), detailRecord, pos);
    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
    BLangTypeDefinition typeDefinition =
            TypeDefBuilderHelper.addTypeDefinition(detailRecord, recordSymbol, recordTypeNode, env);
    typeDefinition.pos = pos;
    return typeDefinition;
}
/**
 * Creates a {@code BIntersectionType} (with a public, unnamed intersection type symbol)
 * whose effective type is the given error type.
 */
private BIntersectionType defineErrorIntersectionType(IntersectableReferenceType effectiveType,
                                                      LinkedHashSet<BType> constituentBTypes, PackageID pkgId,
                                                      BSymbol owner) {
    long symbolFlags = Flags.asMask(EnumSet.of(Flag.PUBLIC));
    BTypeSymbol intersectionSymbol = Symbols.createTypeSymbol(SymTag.INTERSECTION_TYPE, symbolFlags,
            Names.EMPTY, pkgId, null, owner, symTable.builtinPos, VIRTUAL);
    BIntersectionType intersectionType =
            new BIntersectionType(intersectionSymbol, constituentBTypes, effectiveType);
    intersectionSymbol.type = intersectionType;
    return intersectionType;
}
/**
 * Computes the pairwise intersection of two types. `readonly` acts as an identity on
 * either side; otherwise the full intersection computation is delegated to
 * {@code types.getTypeIntersection}.
 */
private BType getPotentialIntersection(Types.IntersectionContext intersectionContext,
                                       BType lhsType, BType rhsType, SymbolEnv env) {
    if (rhsType == symTable.readonlyType) {
        return lhsType;
    }
    if (lhsType == symTable.readonlyType) {
        return rhsType;
    }
    return types.getTypeIntersection(intersectionContext, lhsType, rhsType, env);
}
/**
 * Validates inferred typedesc defaults ({@code <>}): at most one typedesc parameter may
 * use one, and when present the return type must reference that parameter's name.
 *
 * @param pos        position used for the multiple-infer-param error
 * @param parameters parameters of the function type
 * @param retType    resolved return type, or {@code null} if absent
 * @return {@code true} if valid, {@code false} otherwise (an error is logged)
 */
boolean validateInferTypedescParams(Location pos, List<? extends BLangVariable> parameters, BType retType) {
    BVarSymbol inferredParam = null;
    Location inferredDefaultPos = null;
    int inferredCount = 0;
    for (BLangVariable parameter : parameters) {
        BType paramType = parameter.getBType();
        BLangExpression defaultExpr = parameter.expr;
        boolean hasInferredDefault = paramType != null && paramType.tag == TypeTags.TYPEDESC
                && defaultExpr != null && defaultExpr.getKind() == NodeKind.INFER_TYPEDESC_EXPR;
        if (hasInferredDefault) {
            inferredParam = parameter.symbol;
            inferredDefaultPos = defaultExpr.pos;
            inferredCount++;
        }
    }
    if (inferredCount > 1) {
        dlog.error(pos, DiagnosticErrorCode.MULTIPLE_INFER_TYPEDESC_PARAMS);
        return false;
    }
    if (inferredParam == null) {
        return true;
    }
    // A lone inferred default is valid only when the return type refers to the parameter.
    if (retType != null && unifier.refersInferableParamName(inferredParam.name.value, retType)) {
        return true;
    }
    dlog.error(inferredDefaultPos,
            DiagnosticErrorCode.CANNOT_USE_INFERRED_TYPEDESC_DEFAULT_WITH_UNREFERENCED_PARAM);
    return false;
}
/** Returns {@code true} if the symbol is a variable owned directly by a package (module-level). */
private boolean isModuleLevelVar(BSymbol symbol) {
    if (symbol.getKind() != SymbolKind.VARIABLE) {
        return false;
    }
    return symbol.owner.getKind() == SymbolKind.PACKAGE;
}
/**
 * Holder for the result of resolving a dependently-typed (typedesc-parameterized)
 * return type: the value type and the referenced parameter's position.
 */
private static class ParameterizedTypeInfo {
    // Resolved value type for the typedesc parameter (may be semanticError).
    BType paramValueType;
    // Zero-based position of the parameter in the parameter list; -1 when not applicable.
    int index = -1;

    private ParameterizedTypeInfo(BType paramValueType) {
        this(paramValueType, -1);
    }

    private ParameterizedTypeInfo(BType paramValueType, int index) {
        this.paramValueType = paramValueType;
        this.index = index;
    }
}
} | class SymbolResolver extends BLangNodeVisitor {
private static final CompilerContext.Key<SymbolResolver> SYMBOL_RESOLVER_KEY =
new CompilerContext.Key<>();
private SymbolTable symTable;
private Names names;
private BLangDiagnosticLog dlog;
private Types types;
private SymbolEnv env;
private BType resultType;
private DiagnosticCode diagCode;
private SymbolEnter symbolEnter;
private BLangAnonymousModelHelper anonymousModelHelper;
private BLangMissingNodesHelper missingNodesHelper;
private Unifier unifier;
public static SymbolResolver getInstance(CompilerContext context) {
SymbolResolver symbolResolver = context.get(SYMBOL_RESOLVER_KEY);
if (symbolResolver == null) {
symbolResolver = new SymbolResolver(context);
}
return symbolResolver;
}
public SymbolResolver(CompilerContext context) {
context.put(SYMBOL_RESOLVER_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.names = Names.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.types = Types.getInstance(context);
this.symbolEnter = SymbolEnter.getInstance(context);
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
this.unifier = new Unifier();
}
private boolean isRedeclaredSymbol(BSymbol symbol, BSymbol foundSym) {
return hasSameOwner(symbol, foundSym) || isSymbolRedeclaredInTestPackage(symbol, foundSym);
}
public boolean checkForUniqueSymbol(SymbolEnv env, BSymbol symbol) {
BSymbol foundSym = lookupSymbolInMainSpace(env, symbol.name);
if (foundSym == symTable.notFoundSymbol) {
return true;
}
if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
return false;
}
return !hasSameOwner(symbol, foundSym);
}
/**
* This method will check whether the given symbol that is being defined is unique by only checking its current
* environment scope.
*
* @param pos symbol pos for diagnostic purpose.
* @param env symbol environment to lookup.
* @param symbol the symbol that is being defined.
* @param expSymTag expected tag of the symbol for.
* @return true if the symbol is unique, false otherwise.
*/
public boolean checkForUniqueSymbolInCurrentScope(Location pos, SymbolEnv env, BSymbol symbol,
int expSymTag) {
BSymbol foundSym = lookupSymbolInGivenScope(env, symbol.name, expSymTag);
if (foundSym == symTable.notFoundSymbol) {
return true;
}
return isDistinctSymbol(pos, symbol, foundSym);
}
/**
* This method will check whether the symbol being defined is unique comparing it with the found symbol
* from the scope.
*
* @param pos symbol pos for diagnostic purpose.
* @param symbol symbol that is being defined.
* @param foundSym symbol that is found from the scope.
* @return true if the symbol is unique, false otherwise.
*/
private boolean isDistinctSymbol(Location pos, BSymbol symbol, BSymbol foundSym) {
if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
return false;
}
if (isSymbolDefinedInRootPkgLvl(foundSym)) {
dlog.error(pos, DiagnosticErrorCode.REDECLARED_BUILTIN_SYMBOL, symbol.name);
return false;
}
return true;
}
/**
* This method will check whether the symbol being defined is unique comparing it with the found symbol
* from the scope.
*
* @param symbol symbol that is being defined.
* @param foundSym symbol that is found from the scope.
* @return true if the symbol is unique, false otherwise.
*/
private boolean isDistinctSymbol(BSymbol symbol, BSymbol foundSym) {
if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
return false;
}
if (isSymbolDefinedInRootPkgLvl(foundSym)) {
return false;
}
return !hasSameOwner(symbol, foundSym);
}
private boolean hasSameOwner(BSymbol symbol, BSymbol foundSym) {
if (foundSym.owner == symbol.owner ||
(foundSym.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE &&
(symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE &&
foundSym.pkgID.equals(symbol.pkgID)) {
return true;
} else if (Symbols.isFlagOn(symbol.owner.flags, Flags.LAMBDA) &&
((foundSym.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE)) {
return true;
} else if (((symbol.owner.tag & SymTag.LET) == SymTag.LET) &&
((foundSym.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE)) {
return true;
}
return false;
}
private boolean isSymbolRedeclaredInTestPackage(BSymbol symbol, BSymbol foundSym) {
if (Symbols.isFlagOn(symbol.owner.flags, Flags.TESTABLE) &&
!Symbols.isFlagOn(foundSym.owner.flags, Flags.TESTABLE)) {
return true;
}
return false;
}
private boolean isSymbolDefinedInRootPkgLvl(BSymbol foundSym) {
return symTable.rootPkgSymbol.pkgID.equals(foundSym.pkgID) &&
(foundSym.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME;
}
/**
* Lookup the symbol using given name in the given environment scope only.
*
* @param env environment to lookup the symbol.
* @param name name of the symbol to lookup.
* @param expSymTag expected tag of the symbol.
* @return if a symbol is found return it.
*/
public BSymbol lookupSymbolInGivenScope(SymbolEnv env, Name name, int expSymTag) {
ScopeEntry entry = env.scope.lookup(name);
while (entry != NOT_FOUND_ENTRY) {
if (symTable.rootPkgSymbol.pkgID.equals(entry.symbol.pkgID) &&
(entry.symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
return entry.symbol;
}
if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
return entry.symbol;
}
entry = entry.next;
}
return symTable.notFoundSymbol;
}
public boolean checkForUniqueMemberSymbol(Location pos, SymbolEnv env, BSymbol symbol) {
BSymbol foundSym = lookupMemberSymbol(pos, env.scope, env, symbol.name, symbol.tag);
if (foundSym != symTable.notFoundSymbol) {
dlog.error(pos, DiagnosticErrorCode.REDECLARED_SYMBOL, symbol.name);
return false;
}
return true;
}
public BSymbol resolveBinaryOperator(OperatorKind opKind,
BType lhsType,
BType rhsType) {
return resolveOperator(names.fromString(opKind.value()), Lists.of(lhsType, rhsType));
}
BSymbol createEqualityOperator(OperatorKind opKind, BType lhsType, BType rhsType) {
List<BType> paramTypes = Lists.of(lhsType, rhsType);
BType retType = symTable.booleanType;
BInvokableType opType = new BInvokableType(paramTypes, retType, null);
return new BOperatorSymbol(names.fromString(opKind.value()), null, opType, null, symTable.builtinPos, VIRTUAL);
}
public BSymbol resolveUnaryOperator(Location pos,
OperatorKind opKind,
BType type) {
return resolveOperator(names.fromString(opKind.value()), Lists.of(type));
}
public BSymbol resolveOperator(Name name, List<BType> types) {
ScopeEntry entry = symTable.rootScope.lookup(name);
return resolveOperator(entry, types);
}
BSymbol createBinaryComparisonOperator(OperatorKind opKind, BType lhsType, BType rhsType) {
List<BType> paramTypes = Lists.of(lhsType, rhsType);
BInvokableType opType = new BInvokableType(paramTypes, symTable.booleanType, null);
return new BOperatorSymbol(names.fromString(opKind.value()), null, opType, null, symTable.builtinPos, VIRTUAL);
}
BSymbol createBinaryOperator(OperatorKind opKind, BType lhsType, BType rhsType, BType retType) {
List<BType> paramTypes = Lists.of(lhsType, rhsType);
BInvokableType opType = new BInvokableType(paramTypes, retType, null);
return new BOperatorSymbol(names.fromString(opKind.value()), null, opType, null, symTable.builtinPos, VIRTUAL);
}
public BSymbol resolvePkgSymbol(Location pos, SymbolEnv env, Name pkgAlias) {
if (pkgAlias == Names.EMPTY) {
return env.enclPkg.symbol;
}
BSymbol pkgSymbol = lookupSymbolInPrefixSpace(env, pkgAlias);
if (pkgSymbol == symTable.notFoundSymbol) {
dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
}
return pkgSymbol;
}
public BSymbol resolvePrefixSymbol(SymbolEnv env, Name pkgAlias, Name compUnit) {
if (pkgAlias == Names.EMPTY) {
return env.enclPkg.symbol;
}
ScopeEntry entry = env.scope.lookup(pkgAlias);
while (entry != NOT_FOUND_ENTRY) {
if ((entry.symbol.tag & SymTag.XMLNS) == SymTag.XMLNS) {
return entry.symbol;
}
if ((entry.symbol.tag & SymTag.IMPORT) == SymTag.IMPORT &&
((BPackageSymbol) entry.symbol).compUnit.equals(compUnit)) {
((BPackageSymbol) entry.symbol).isUsed = true;
return entry.symbol;
}
entry = entry.next;
}
if (env.enclEnv != null) {
return resolvePrefixSymbol(env.enclEnv, pkgAlias, compUnit);
}
return symTable.notFoundSymbol;
}
public BSymbol resolveAnnotation(Location pos, SymbolEnv env, Name pkgAlias, Name annotationName) {
return this.lookupAnnotationSpaceSymbolInPackage(pos, env, pkgAlias, annotationName);
}
public BSymbol resolveStructField(Location location, SymbolEnv env, Name fieldName,
BTypeSymbol structSymbol) {
return lookupMemberSymbol(location, structSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
public BSymbol resolveObjectField(Location location, SymbolEnv env, Name fieldName,
BTypeSymbol objectSymbol) {
return lookupMemberSymbol(location, objectSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
public BSymbol resolveObjectMethod(Location pos, SymbolEnv env, Name fieldName,
BObjectTypeSymbol objectSymbol) {
return lookupMemberSymbol(pos, objectSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
public BSymbol resolveInvocableObjectField(Location pos, SymbolEnv env, Name fieldName,
BObjectTypeSymbol objectTypeSymbol) {
return lookupMemberSymbol(pos, objectTypeSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
public BType resolveTypeNode(BLangType typeNode, SymbolEnv env) {
return resolveTypeNode(typeNode, env, DiagnosticErrorCode.UNKNOWN_TYPE);
}
public BType resolveTypeNode(BLangType typeNode, SymbolEnv env, DiagnosticCode diagCode) {
SymbolEnv prevEnv = this.env;
DiagnosticCode preDiagCode = this.diagCode;
this.env = env;
this.diagCode = diagCode;
typeNode.accept(this);
this.env = prevEnv;
this.diagCode = preDiagCode;
if (this.resultType != symTable.noType) {
if (typeNode.nullable && this.resultType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) this.resultType;
unionType.add(symTable.nilType);
} else if (typeNode.nullable && resultType.tag != TypeTags.JSON && resultType.tag != TypeTags.ANY) {
this.resultType = BUnionType.create(null, resultType, symTable.nilType);
}
}
typeNode.setBType(resultType);
return resultType;
}
/**
 * Return the symbol associated with the given name in the current package. This method first
 * searches the symbol in the current scope and proceeds to the enclosing scope if it is not in
 * the current scope. This process continues until the symbol is found or the root scope is
 * reached. This method is mainly meant for checking whether a given symbol is already defined
 * in the scope hierarchy.
 *
 * @param env       current symbol environment
 * @param name      symbol name
 * @param expSymTag expected symbol type/tag
 * @return resolved symbol, or {@code symTable.notFoundSymbol} when no match exists
 */
private BSymbol lookupSymbolForDecl(SymbolEnv env, Name name, int expSymTag) {
    ScopeEntry entry = env.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & expSymTag) == expSymTag) {
            return entry.symbol;
        }
        entry = entry.next;
    }
    if (env.enclEnv != null) {
        // NOTE(review): enclosing scopes are searched via lookupSymbol(), which additionally
        // filters out record-field references — confirm this asymmetry is intentional.
        return lookupSymbol(env.enclEnv, name, expSymTag);
    }
    return symTable.notFoundSymbol;
}
/**
 * Return the symbol associated with the given name in the current package. This method first
 * searches the symbol in the current scope and proceeds to the enclosing scope if it is not in
 * the current scope. This process continues until the symbol is found or the root scope is
 * reached. This method is meant for looking up a symbol when it is referenced. If looking up a
 * symbol from within a record type definition, this method ignores record fields. This is done
 * so that default value expressions cannot refer to other record fields.
 *
 * @param env       current symbol environment
 * @param name      symbol name
 * @param expSymTag expected symbol type/tag
 * @return resolved symbol, or {@code symTable.notFoundSymbol} when no match exists
 */
private BSymbol lookupSymbol(SymbolEnv env, Name name, int expSymTag) {
    ScopeEntry entry = env.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        // Skip record fields referenced from inside a record type definition.
        if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
            return entry.symbol;
        }
        entry = entry.next;
    }
    if (env.enclEnv != null) {
        return lookupSymbol(env.enclEnv, name, expSymTag);
    }
    return symTable.notFoundSymbol;
}
/**
 * Checks whether {@code symbol} is a record field being referenced from inside a record type
 * definition (not necessarily the record that owns the field). Used by the lookup methods to
 * keep record default value expressions from referring to other record fields.
 *
 * @param symbol symbol to test
 * @param env    environment in which the symbol was found
 * @return {@code true} if the symbol is a record field referenced from within a record type
 */
private boolean isFieldRefFromWithinARecord(BSymbol symbol, SymbolEnv env) {
    boolean ownerIsRecord = (symbol.owner.tag & SymTag.RECORD) == SymTag.RECORD;
    if (!ownerIsRecord) {
        return false;
    }
    return env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE;
}
/**
 * Looks up {@code name} in the main symbol space of the scope chain rooted at {@code env}.
 *
 * @param env  current symbol environment
 * @param name symbol name
 * @return resolved symbol, or the not-found symbol
 */
public BSymbol lookupSymbolInMainSpace(SymbolEnv env, Name name) {
    final int expectedSymTag = SymTag.MAIN;
    return lookupSymbol(env, name, expectedSymTag);
}
/**
 * Looks up {@code name} in the annotation symbol space of the scope chain rooted at {@code env}.
 *
 * @param env  current symbol environment
 * @param name symbol name
 * @return resolved symbol, or the not-found symbol
 */
public BSymbol lookupSymbolInAnnotationSpace(SymbolEnv env, Name name) {
    final int expectedSymTag = SymTag.ANNOTATION;
    return lookupSymbol(env, name, expectedSymTag);
}
/**
 * Looks up {@code name} in the import-prefix symbol space of the scope chain rooted at
 * {@code env}.
 *
 * @param env  current symbol environment
 * @param name symbol name
 * @return resolved symbol, or the not-found symbol
 */
public BSymbol lookupSymbolInPrefixSpace(SymbolEnv env, Name name) {
    final int expectedSymTag = SymTag.IMPORT;
    return lookupSymbol(env, name, expectedSymTag);
}
/**
 * Looks up {@code name} in the constructor symbol space of the scope chain rooted at
 * {@code env}.
 *
 * @param env  current symbol environment
 * @param name symbol name
 * @return resolved symbol, or the not-found symbol
 */
public BSymbol lookupSymbolInConstructorSpace(SymbolEnv env, Name name) {
    final int expectedSymTag = SymTag.CONSTRUCTOR;
    return lookupSymbol(env, name, expectedSymTag);
}
/**
 * Looks up a lang-library method applicable to the given type, dispatching on the type's tag to
 * the corresponding {@code lang.*} module. Falls back to {@code lang.value} (for non-object
 * types) and then {@code lang.internal} when the type-specific module has no match.
 *
 * @param type receiver type the method is invoked on
 * @param name method name to resolve
 * @return the resolved method symbol, or the not-found symbol
 */
public BSymbol lookupLangLibMethod(BType type, Name name) {
    // Lang modules are not yet loaded (e.g. while compiling the annotations module itself).
    if (symTable.langAnnotationModuleSymbol == null) {
        return symTable.notFoundSymbol;
    }
    BSymbol bSymbol;
    switch (type.tag) {
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
            bSymbol = lookupMethodInModule(symTable.langArrayModuleSymbol, name, env);
            break;
        case TypeTags.DECIMAL:
            bSymbol = lookupMethodInModule(symTable.langDecimalModuleSymbol, name, env);
            break;
        case TypeTags.ERROR:
            bSymbol = lookupMethodInModule(symTable.langErrorModuleSymbol, name, env);
            break;
        case TypeTags.FLOAT:
            bSymbol = lookupMethodInModule(symTable.langFloatModuleSymbol, name, env);
            break;
        case TypeTags.FUTURE:
            bSymbol = lookupMethodInModule(symTable.langFutureModuleSymbol, name, env);
            break;
        // All integer subtypes (including byte) share the lang.int module.
        case TypeTags.INT:
        case TypeTags.SIGNED32_INT:
        case TypeTags.SIGNED16_INT:
        case TypeTags.SIGNED8_INT:
        case TypeTags.UNSIGNED32_INT:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED8_INT:
        case TypeTags.BYTE:
            bSymbol = lookupMethodInModule(symTable.langIntModuleSymbol, name, env);
            break;
        case TypeTags.MAP:
        case TypeTags.RECORD:
            bSymbol = lookupMethodInModule(symTable.langMapModuleSymbol, name, env);
            break;
        case TypeTags.OBJECT:
            bSymbol = lookupMethodInModule(symTable.langObjectModuleSymbol, name, env);
            break;
        case TypeTags.STREAM:
            bSymbol = lookupMethodInModule(symTable.langStreamModuleSymbol, name, env);
            break;
        case TypeTags.TABLE:
            bSymbol = lookupMethodInModule(symTable.langTableModuleSymbol, name, env);
            break;
        case TypeTags.STRING:
        case TypeTags.CHAR_STRING:
            bSymbol = lookupMethodInModule(symTable.langStringModuleSymbol, name, env);
            break;
        case TypeTags.TYPEDESC:
            bSymbol = lookupMethodInModule(symTable.langTypedescModuleSymbol, name, env);
            break;
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
        case TypeTags.XML_COMMENT:
        case TypeTags.XML_PI:
            bSymbol = lookupMethodInModule(symTable.langXmlModuleSymbol, name, env);
            break;
        // xml:Text also accepts lang.string methods as a secondary lookup.
        case TypeTags.XML_TEXT:
            bSymbol = lookupMethodInModule(symTable.langXmlModuleSymbol, name, env);
            if (bSymbol == symTable.notFoundSymbol) {
                bSymbol = lookupMethodInModule(symTable.langStringModuleSymbol, name, env);
            }
            break;
        case TypeTags.BOOLEAN:
            bSymbol = lookupMethodInModule(symTable.langBooleanModuleSymbol, name, env);
            break;
        case TypeTags.UNION:
            Iterator<BType> itr = ((BUnionType) type).getMemberTypes().iterator();
            if (!itr.hasNext()) {
                throw new IllegalArgumentException(
                        format("Union type '%s' does not have member types", type.toString()));
            }
            // Use the first member as the representative basic type; only proceed when the
            // whole union is a subtype of that member's basic type.
            BType member = itr.next();
            if (types.isSubTypeOfBaseType(type, member.tag)) {
                bSymbol = lookupLangLibMethod(member, name);
            } else {
                bSymbol = symTable.notFoundSymbol;
            }
            break;
        // A finite type delegates to the basic type it is assignable to (checked in order).
        case TypeTags.FINITE:
            if (types.isAssignable(type, symTable.intType)) {
                return lookupLangLibMethod(symTable.intType, name);
            }
            if (types.isAssignable(type, symTable.stringType)) {
                return lookupLangLibMethod(symTable.stringType, name);
            }
            if (types.isAssignable(type, symTable.decimalType)) {
                return lookupLangLibMethod(symTable.decimalType, name);
            }
            if (types.isAssignable(type, symTable.floatType)) {
                return lookupLangLibMethod(symTable.floatType, name);
            }
            if (types.isAssignable(type, symTable.booleanType)) {
                return lookupLangLibMethod(symTable.booleanType, name);
            }
            bSymbol = symTable.notFoundSymbol;
            break;
        default:
            bSymbol = symTable.notFoundSymbol;
    }
    // Fallback 1: lang.value applies to every non-object type.
    if (bSymbol == symTable.notFoundSymbol && type.tag != TypeTags.OBJECT) {
        bSymbol = lookupMethodInModule(symTable.langValueModuleSymbol, name, env);
    }
    // Fallback 2: compiler-internal helpers in lang.internal.
    if (bSymbol == symTable.notFoundSymbol) {
        bSymbol = lookupMethodInModule(symTable.langInternalModuleSymbol, name, env);
    }
    return bSymbol;
}
/**
 * Recursively analyse the symbol env to find the closure variable symbol that is being resolved.
 *
 * @param env       symbol env to analyse and find the closure variable
 * @param name      name of the symbol to lookup
 * @param expSymTag symbol tag
 * @return the resolved symbol, or {@code symTable.notFoundSymbol}
 */
public BSymbol lookupClosureVarSymbol(SymbolEnv env, Name name, int expSymTag) {
    ScopeEntry entry = env.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        // A variable defined in the root package matches regardless of expSymTag.
        if (symTable.rootPkgSymbol.pkgID.equals(entry.symbol.pkgID) &&
                (entry.symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
            return entry.symbol;
        }
        // Otherwise require the expected tag, skipping record-field refs from within a record.
        if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
            return entry.symbol;
        }
        entry = entry.next;
    }
    // Stop when there is no enclosing environment (or it has no attached node).
    if (env.enclEnv == null || env.enclEnv.node == null) {
        return symTable.notFoundSymbol;
    }
    return lookupClosureVarSymbol(env.enclEnv, name, expSymTag);
}
/**
 * Resolves {@code name} in the main symbol space, honouring an optional module prefix.
 * An empty prefix searches the current environment; otherwise the prefix is resolved to a
 * module first and the name is looked up in that module's scope.
 *
 * @param pos      position used for error reporting
 * @param env      current symbol environment
 * @param pkgAlias module prefix ({@code Names.EMPTY} for an unqualified reference)
 * @param name     symbol name
 * @return resolved symbol, or the not-found symbol (an undefined-module error is logged)
 */
public BSymbol lookupMainSpaceSymbolInPackage(Location pos,
                                              SymbolEnv env,
                                              Name pkgAlias,
                                              Name name) {
    if (pkgAlias == Names.EMPTY) {
        // Unqualified reference: search the current scope chain directly.
        return lookupSymbolInMainSpace(env, name);
    }
    Name compUnitName = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSymbol = resolvePrefixSymbol(env, pkgAlias, compUnitName);
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.MAIN);
}
/**
 * Resolves {@code name} in the import-prefix symbol space, honouring an optional module prefix.
 *
 * @param pos      position used for error reporting
 * @param env      current symbol environment
 * @param pkgAlias module prefix ({@code Names.EMPTY} for an unqualified reference)
 * @param name     symbol name
 * @return resolved symbol, or the not-found symbol (an undefined-module error is logged)
 */
public BSymbol lookupPrefixSpaceSymbolInPackage(Location pos,
                                                SymbolEnv env,
                                                Name pkgAlias,
                                                Name name) {
    if (pkgAlias == Names.EMPTY) {
        // Unqualified reference: search the current scope chain directly.
        return lookupSymbolInPrefixSpace(env, name);
    }
    Name compUnitName = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSymbol = resolvePrefixSymbol(env, pkgAlias, compUnitName);
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.IMPORT);
}
/**
 * Resolves {@code name} in the annotation symbol space, honouring an optional module prefix.
 *
 * @param pos      position used for error reporting
 * @param env      current symbol environment
 * @param pkgAlias module prefix ({@code Names.EMPTY} for an unqualified reference)
 * @param name     symbol name
 * @return resolved symbol, or the not-found symbol (an undefined-module error is logged)
 */
public BSymbol lookupAnnotationSpaceSymbolInPackage(Location pos,
                                                    SymbolEnv env,
                                                    Name pkgAlias,
                                                    Name name) {
    if (pkgAlias == Names.EMPTY) {
        // Unqualified reference: search the current scope chain directly.
        return lookupSymbolInAnnotationSpace(env, name);
    }
    Name compUnitName = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSymbol = resolvePrefixSymbol(env, pkgAlias, compUnitName);
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.ANNOTATION);
}
/**
 * Resolves {@code name} in the constructor symbol space, honouring an optional module prefix.
 *
 * @param pos      position used for error reporting
 * @param env      current symbol environment
 * @param pkgAlias module prefix ({@code Names.EMPTY} for an unqualified reference)
 * @param name     symbol name
 * @return resolved symbol, or the not-found symbol (an undefined-module error is logged)
 */
public BSymbol lookupConstructorSpaceSymbolInPackage(Location pos,
                                                     SymbolEnv env,
                                                     Name pkgAlias,
                                                     Name name) {
    if (pkgAlias == Names.EMPTY) {
        // Unqualified reference: search the current scope chain directly.
        return lookupSymbolInConstructorSpace(env, name);
    }
    Name compUnitName = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSymbol = resolvePrefixSymbol(env, pkgAlias, compUnitName);
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.CONSTRUCTOR);
}
/**
 * Looks up a function with the given name in a module's scope, honouring visibility rules.
 *
 * @param moduleSymbol module whose scope is searched
 * @param name         function name
 * @param env          environment the access originates from (used for the access check)
 * @return the function symbol if found and accessible; the not-found symbol otherwise
 */
public BSymbol lookupMethodInModule(BPackageSymbol moduleSymbol, Name name, SymbolEnv env) {
    for (ScopeEntry entry = moduleSymbol.scope.lookup(name); entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.FUNCTION) != SymTag.FUNCTION) {
            // Skip same-named entries that are not functions.
            continue;
        }
        // The first function entry decides the outcome; an inaccessible match is not retried.
        return isMemberAccessAllowed(env, entry.symbol) ? entry.symbol : symTable.notFoundSymbol;
    }
    return symTable.notFoundSymbol;
}
/**
 * Return the symbol with the given name, looking only at symbols defined directly in the given
 * scope. Logs a diagnostic when a matching symbol exists but is not accessible from {@code env}.
 *
 * @param pos       diagnostic position
 * @param scope     scope to search
 * @param env       symbol environment the access originates from
 * @param name      symbol name
 * @param expSymTag expected symbol type/tag
 * @return resolved symbol, or the not-found symbol
 */
public BSymbol lookupMemberSymbol(Location pos,
                                  Scope scope,
                                  SymbolEnv env,
                                  Name name,
                                  int expSymTag) {
    for (ScopeEntry entry = scope.lookup(name); entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & expSymTag) != expSymTag) {
            // Same name, different symbol kind — keep scanning the entry chain.
            continue;
        }
        if (!isMemberAccessAllowed(env, entry.symbol)) {
            // Found but not visible from the caller's context; report and stop.
            dlog.error(pos, DiagnosticErrorCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, entry.symbol.name);
            return symTable.notFoundSymbol;
        }
        return entry.symbol;
    }
    return symTable.notFoundSymbol;
}
/**
 * Resolve and return the namespaces visible to the given environment, as a map that preserves
 * discovery order.
 *
 * @param env environment to get the visible namespaces for
 * @return map of namespace symbols visible to the given environment
 */
public Map<Name, BXMLNSSymbol> resolveAllNamespaces(SymbolEnv env) {
    Map<Name, BXMLNSSymbol> visibleNamespaces = new LinkedHashMap<>();
    addNamespacesInScope(visibleNamespaces, env);
    return visibleNamespaces;
}
/**
 * Bootstraps {@code symTable.errorType} and {@code symTable.detailType} from the built-in
 * {@code error} type defined in the root package scope.
 *
 * <p>Note: the misspelled method name ("boostrap") is preserved because external callers
 * depend on it.
 *
 * @throws IllegalStateException if the built-in error type cannot be found
 */
public void boostrapErrorType() {
    for (ScopeEntry entry = symTable.rootPkgSymbol.scope.lookup(Names.ERROR);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symTable.errorType = (BErrorType) entry.symbol.type;
            symTable.detailType = (BMapType) symTable.errorType.detailType;
            return;
        }
    }
    throw new IllegalStateException("built-in error not found ?");
}
/**
 * Registers the built-in operator symbols by delegating to the symbol table.
 */
public void defineOperators() {
    symTable.defineOperators();
}
/**
 * Bootstraps {@code symTable.anydataType} (and {@code anydataOrReadonly}) from the {@code anydata}
 * union defined in the lang.annotations module, rebinding the module's symbol to the new
 * {@code BAnydataType} and giving it a fresh built-in type symbol.
 *
 * @throws IllegalStateException if the built-in 'anydata' type cannot be found
 */
public void bootstrapAnydataType() {
    ScopeEntry entry = symTable.langAnnotationModuleSymbol.scope.lookup(Names.ANYDATA);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        // Wrap the resolved union in a dedicated anydata type and publish it on the symbol table.
        BUnionType type = (BUnionType) entry.symbol.type;
        symTable.anydataType = new BAnydataType(type);
        symTable.anydataOrReadonly = BUnionType.create(null, symTable.anydataType, symTable.readonlyType);
        // Rebind the module's 'anydata' symbol to the new type and mark it built-in.
        entry.symbol.type = symTable.anydataType;
        entry.symbol.origin = BUILTIN;
        symTable.anydataType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.ANYDATA,
                PackageID.ANNOTATIONS, symTable.anydataType, symTable.rootPkgSymbol, symTable.builtinPos, BUILTIN);
        return;
    }
    throw new IllegalStateException("built-in 'anydata' type not found");
}
/**
 * Bootstraps {@code symTable.jsonType} from the {@code json} union defined in the
 * lang.annotations module, rebinding the module's symbol to the new {@code BJSONType}.
 *
 * @throws IllegalStateException if the built-in 'json' type cannot be found
 */
public void bootstrapJsonType() {
    for (ScopeEntry entry = symTable.langAnnotationModuleSymbol.scope.lookup(Names.JSON);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            continue;
        }
        BUnionType resolvedUnion = (BUnionType) entry.symbol.type;
        symTable.jsonType = new BJSONType(resolvedUnion);
        symTable.jsonType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.JSON, PackageID.ANNOTATIONS,
                symTable.jsonType, symTable.langAnnotationModuleSymbol, symTable.builtinPos, BUILTIN);
        // Rebind the module's 'json' symbol to the new type and mark it built-in.
        entry.symbol.type = symTable.jsonType;
        entry.symbol.origin = BUILTIN;
        return;
    }
    throw new IllegalStateException("built-in 'json' type not found");
}
/**
 * Bootstraps {@code symTable.cloneableType} and the many symbol-table types derived from it.
 * When the lang.value module is available, the 'Cloneable' type is taken from it and the
 * error/detail/any-or-error family of types is rebuilt; otherwise the internal 'Cloneable'
 * placeholder in the root package is rebound to the already-known cloneable type.
 *
 * @throws IllegalStateException if lang.value is present but 'Cloneable' cannot be found in it
 */
public void bootstrapCloneableType() {
    if (symTable.langValueModuleSymbol != null) {
        ScopeEntry entry = symTable.langValueModuleSymbol.scope.lookup(Names.CLONEABLE);
        while (entry != NOT_FOUND_ENTRY) {
            if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
                entry = entry.next;
                continue;
            }
            symTable.cloneableType = (BUnionType) entry.symbol.type;
            symTable.cloneableType.tsymbol =
                    new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.CLONEABLE,
                            PackageID.VALUE, symTable.cloneableType, symTable.langValueModuleSymbol,
                            symTable.builtinPos, BUILTIN);
            // Rebuild the error machinery on top of the freshly resolved cloneable type.
            symTable.detailType = new BMapType(TypeTags.MAP, symTable.cloneableType, null);
            symTable.errorType = new BErrorType(null, symTable.detailType);
            symTable.errorType.tsymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR,
                    symTable.rootPkgSymbol.pkgID, symTable.errorType, symTable.rootPkgSymbol, symTable.builtinPos
                    , BUILTIN);
            // Derived convenience types used throughout the compiler.
            symTable.errorOrNilType = BUnionType.create(null, symTable.errorType, symTable.nilType);
            symTable.anyOrErrorType = BUnionType.create(null, symTable.anyType, symTable.errorType);
            symTable.mapAllType = new BMapType(TypeTags.MAP, symTable.anyOrErrorType, null);
            symTable.arrayAllType = new BArrayType(symTable.anyOrErrorType);
            symTable.typeDesc.constraint = symTable.anyOrErrorType;
            symTable.futureType.constraint = symTable.anyOrErrorType;
            symTable.pureType = BUnionType.create(null, symTable.anydataType, symTable.errorType);
            return;
        }
        throw new IllegalStateException("built-in 'lang.value:Cloneable' type not found");
    }
    // lang.value not loaded: point the internal Cloneable placeholder at the known type.
    // NOTE(review): this path does not throw when the placeholder is missing — confirm that
    // silently doing nothing is the intended behavior here.
    ScopeEntry entry = symTable.rootPkgSymbol.scope.lookup(Names.CLONEABLE_INTERNAL);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        entry.symbol.type = symTable.cloneableType;
        break;
    }
}
/**
 * Bootstraps {@code symTable.intRangeType} from the return type of the internal
 * {@code createIntRange} function, then registers the integer-range operations.
 *
 * @throws IllegalStateException if the internal int-range constructor cannot be found
 */
public void bootstrapIntRangeType() {
    for (ScopeEntry entry = symTable.langInternalModuleSymbol.scope.lookup(Names.CREATE_INT_RANGE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {
            symTable.intRangeType = (BObjectType) ((BInvokableType) entry.symbol.type).retType;
            symTable.defineIntRangeOperations();
            return;
        }
    }
    throw new IllegalStateException("built-in Integer Range type not found ?");
}
/**
 * Bootstraps {@code symTable.iterableType} from the {@code Iterable} object type defined in the
 * lang.object module.
 *
 * @throws IllegalStateException if the Iterable type cannot be found
 */
public void bootstrapIterableType() {
    for (ScopeEntry entry = symTable.langObjectModuleSymbol.scope.lookup(Names.OBJECT_ITERABLE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symTable.iterableType = (BObjectType) entry.symbol.type;
            return;
        }
    }
    throw new IllegalStateException("built-in distinct Iterable type not found ?");
}
/**
 * Loads {@code symTable.rawTemplateType} from the {@code RawTemplate} object type defined in the
 * lang.object module.
 *
 * @throws IllegalStateException if the RawTemplate type cannot be found
 */
public void loadRawTemplateType() {
    for (ScopeEntry entry = symTable.langObjectModuleSymbol.scope.lookup(Names.RAW_TEMPLATE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symTable.rawTemplateType = (BObjectType) entry.symbol.type;
            return;
        }
    }
    throw new IllegalStateException("'lang.object:RawTemplate' type not found");
}
/**
 * Resolves a built-in value type node (int, string, boolean, etc.) via its type kind.
 */
public void visit(BLangValueType valueTypeNode) {
    visitBuiltInTypeNode(valueTypeNode, valueTypeNode.typeKind, this.env);
}
/**
 * Resolves a built-in reference type node (map, future, typedesc, etc.) via its type kind.
 */
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
    visitBuiltInTypeNode(builtInRefType, builtInRefType.typeKind, this.env);
}
/**
 * Resolves an array type node, wrapping the resolved element type in one {@code BArrayType}
 * per declared dimension. Sizes may be literals (including the open/inferred sentinel values)
 * or references to integer constants; invalid sizes log a diagnostic and make the overall
 * result {@code semanticError}.
 */
public void visit(BLangArrayType arrayTypeNode) {
    resultType = resolveTypeNode(arrayTypeNode.elemtype, env, diagCode);
    if (resultType == symTable.noType) {
        return;
    }
    boolean isError = false;
    // Each iteration wraps the current resultType in another array dimension.
    // NOTE(review): when sizes is non-empty, sizes[i] is read for every dimension — this
    // assumes sizes.length == dimensions; confirm the parser guarantees that.
    for (int i = 0; i < arrayTypeNode.dimensions; i++) {
        BTypeSymbol arrayTypeSymbol = Symbols.createTypeSymbol(SymTag.ARRAY_TYPE, Flags.PUBLIC, Names.EMPTY,
                env.enclPkg.symbol.pkgID, null, env.scope.owner,
                arrayTypeNode.pos, SOURCE);
        BArrayType arrType;
        if (arrayTypeNode.sizes.length == 0) {
            // No sizes at all: plain open array for every dimension.
            arrType = new BArrayType(resultType, arrayTypeSymbol);
        } else {
            BLangExpression size = arrayTypeNode.sizes[i];
            if (size.getKind() == NodeKind.LITERAL || size.getKind() == NodeKind.NUMERIC_LITERAL) {
                // Literal size: sentinel values select the open/inferred array states.
                Integer sizeIndicator = (Integer) (((BLangLiteral) size).getValue());
                BArrayState arrayState;
                if (sizeIndicator == OPEN_ARRAY_INDICATOR) {
                    arrayState = BArrayState.OPEN;
                } else if (sizeIndicator == INFERRED_ARRAY_INDICATOR) {
                    arrayState = BArrayState.INFERRED;
                } else {
                    arrayState = BArrayState.CLOSED;
                }
                arrType = new BArrayType(resultType, arrayTypeSymbol, sizeIndicator, arrayState);
            } else {
                // Non-literal size: must be a simple reference to an int constant.
                if (size.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
                    dlog.error(size.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType,
                            ((BLangTypedescExpr) size).getTypeNode());
                    isError = true;
                    continue;
                }
                BLangSimpleVarRef sizeReference = (BLangSimpleVarRef) size;
                Name pkgAlias = names.fromIdNode(sizeReference.pkgAlias);
                Name typeName = names.fromIdNode(sizeReference.variableName);
                BSymbol sizeSymbol = lookupMainSpaceSymbolInPackage(size.pos, env, pkgAlias, typeName);
                sizeReference.symbol = sizeSymbol;
                if (symTable.notFoundSymbol == sizeSymbol) {
                    dlog.error(arrayTypeNode.pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, size);
                    isError = true;
                    continue;
                }
                if (sizeSymbol.tag != SymTag.CONSTANT) {
                    dlog.error(size.pos, DiagnosticErrorCode.INVALID_ARRAY_SIZE_REFERENCE, sizeSymbol);
                    isError = true;
                    continue;
                }
                BConstantSymbol sizeConstSymbol = (BConstantSymbol) sizeSymbol;
                BType lengthLiteralType = sizeConstSymbol.literalType;
                if (lengthLiteralType.tag != TypeTags.INT) {
                    dlog.error(size.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType,
                            sizeConstSymbol.literalType);
                    isError = true;
                    continue;
                }
                // NOTE(review): the length is recovered by parsing the constant's type
                // toString() — presumably a finite (singleton) type whose string form is the
                // value; confirm, as this looks fragile.
                int length = Integer.parseInt(sizeConstSymbol.type.toString());
                arrType = new BArrayType(resultType, arrayTypeSymbol, length, BArrayState.CLOSED);
            }
        }
        arrayTypeSymbol.type = arrType;
        resultType = arrayTypeSymbol.type;
        markParameterizedType(arrType, arrType.eType);
    }
    if (isError) {
        resultType = symTable.semanticError;
    }
}
/**
 * Resolves a union type node by resolving each member type and combining them into a
 * {@code BUnionType}. Resolution short-circuits to {@code noType} if any member fails.
 */
public void visit(BLangUnionTypeNode unionTypeNode) {
    LinkedHashSet<BType> resolvedMembers = new LinkedHashSet<>();
    for (BLangType memberTypeNode : unionTypeNode.memberTypeNodes) {
        BType memberType = resolveTypeNode(memberTypeNode, env);
        if (memberType == symTable.noType) {
            resultType = symTable.noType;
            return;
        }
        resolvedMembers.add(memberType);
    }
    BTypeSymbol unionTypeSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
            Names.EMPTY, env.enclPkg.symbol.pkgID, null,
            env.scope.owner, unionTypeNode.pos, SOURCE);
    BUnionType unionType = BUnionType.create(unionTypeSymbol, resolvedMembers);
    unionTypeSymbol.type = unionType;
    markParameterizedType(unionType, resolvedMembers);
    resultType = unionType;
}
/**
 * Resolves an intersection type node by delegating to {@code computeIntersectionType}.
 */
public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
    resultType = computeIntersectionType(intersectionTypeNode);
}
/**
 * Resolves an object type node into a {@code BObjectType}, carrying the node's flags onto the
 * symbol and mapping READONLY/ISOLATED/SERVICE onto the type's flag bits. Anonymous objects are
 * made public.
 */
public void visit(BLangObjectTypeNode objectTypeNode) {
    EnumSet<Flag> symbolFlags = EnumSet.copyOf(objectTypeNode.flagSet);
    if (objectTypeNode.isAnonymous) {
        symbolFlags.add(Flag.PUBLIC);
    }
    // Translate the relevant symbol flags into type-level flag bits.
    int typeFlags = 0;
    if (symbolFlags.contains(Flag.READONLY)) {
        typeFlags |= Flags.READONLY;
    }
    if (symbolFlags.contains(Flag.ISOLATED)) {
        typeFlags |= Flags.ISOLATED;
    }
    if (symbolFlags.contains(Flag.SERVICE)) {
        typeFlags |= Flags.SERVICE;
    }
    BTypeSymbol objectSymbol = Symbols.createObjectSymbol(Flags.asMask(symbolFlags), Names.EMPTY,
            env.enclPkg.symbol.pkgID, null, env.scope.owner, objectTypeNode.pos, SOURCE);
    BObjectType objectType = new BObjectType(objectSymbol, typeFlags);
    objectSymbol.type = objectType;
    objectTypeNode.symbol = objectSymbol;
    resultType = objectType;
}
/**
 * Resolves a record type node. On first visit a new record symbol/type pair is created; for a
 * record defined outside the package level, an anonymous name is generated and the symbol and
 * node are defined eagerly. A node that already carries a symbol just reuses its type.
 */
public void visit(BLangRecordTypeNode recordTypeNode) {
    if (recordTypeNode.symbol == null) {
        EnumSet<Flag> flags = recordTypeNode.isAnonymous ? EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS)
                : EnumSet.noneOf(Flag.class);
        BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(flags), Names.EMPTY,
                env.enclPkg.symbol.pkgID, null,
                env.scope.owner, recordTypeNode.pos,
                recordTypeNode.isAnonymous ? VIRTUAL : SOURCE);
        BRecordType recordType = new BRecordType(recordSymbol);
        recordSymbol.type = recordType;
        recordTypeNode.symbol = recordSymbol;
        // Records defined in non-package scopes (e.g. inside a function) get a generated
        // anonymous name and are entered into the symbol space immediately.
        if (env.node.getKind() != NodeKind.PACKAGE) {
            recordSymbol.name = names.fromString(
                    anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
            symbolEnter.defineSymbol(recordTypeNode.pos, recordTypeNode.symbol, env);
            symbolEnter.defineNode(recordTypeNode, env);
        }
        resultType = recordType;
    } else {
        resultType = recordTypeNode.symbol.type;
    }
}
/**
 * Resolves a stream type node into a {@code BStreamType}, defaulting the completion component
 * to nil when the node declares none. Fails to {@code noType} if the constraint fails.
 */
public void visit(BLangStreamType streamTypeNode) {
    BType streamKeywordType = resolveTypeNode(streamTypeNode.type, env);
    BType constraintType = resolveTypeNode(streamTypeNode.constraint, env);
    BType completionType = streamTypeNode.error != null
            ? resolveTypeNode(streamTypeNode.error, env) : symTable.nilType;
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }
    BType streamType = new BStreamType(TypeTags.STREAM, constraintType, completionType, null);
    BTypeSymbol typeSymbol = streamKeywordType.tsymbol;
    streamType.tsymbol = Symbols.createTypeSymbol(typeSymbol.tag, typeSymbol.flags, typeSymbol.name,
            typeSymbol.originalName, typeSymbol.pkgID, streamType,
            typeSymbol.owner, streamTypeNode.pos, SOURCE);
    markParameterizedType(streamType, constraintType);
    // NOTE(review): completionType is never null here (it defaults to nil above); this guard
    // looks redundant but is kept to preserve the original behavior exactly.
    if (completionType != null) {
        markParameterizedType(streamType, completionType);
    }
    resultType = streamType;
}
/**
 * Resolves a table type node into a {@code BTableType}, carrying over an explicit key type
 * constraint or a key-field specifier when present. Map-constrained tables with keys are
 * rejected unless declared inside a lang library.
 */
public void visit(BLangTableTypeNode tableTypeNode) {
    BType type = resolveTypeNode(tableTypeNode.type, env);
    BType constraintType = resolveTypeNode(tableTypeNode.constraint, env);
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }
    BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
    BTypeSymbol typeSymbol = type.tsymbol;
    tableType.tsymbol = Symbols.createTypeSymbol(SymTag.TYPE, Flags.asMask(EnumSet.noneOf(Flag.class)),
            typeSymbol.name, typeSymbol.originalName, env.enclPkg.symbol.pkgID, tableType,
            env.scope.owner, tableTypeNode.pos, SOURCE);
    // Copy the flags of the 'table' keyword type symbol onto the new symbol.
    tableType.tsymbol.flags = typeSymbol.flags;
    tableType.constraintPos = tableTypeNode.constraint.pos;
    tableType.isTypeInlineDefined = tableTypeNode.isTypeInlineDefined;
    if (tableTypeNode.tableKeyTypeConstraint != null) {
        // table<T> key<K> form: resolve the key constraint type.
        tableType.keyTypeConstraint = resolveTypeNode(tableTypeNode.tableKeyTypeConstraint.keyType, env);
        tableType.keyPos = tableTypeNode.tableKeyTypeConstraint.pos;
    } else if (tableTypeNode.tableKeySpecifier != null) {
        // table<T> key(f1, f2) form: record the key field names.
        BLangTableKeySpecifier tableKeySpecifier = tableTypeNode.tableKeySpecifier;
        List<String> fieldNameList = new ArrayList<>();
        for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) {
            fieldNameList.add(((BLangIdentifier) identifier).value);
        }
        tableType.fieldNameList = fieldNameList;
        tableType.keyPos = tableKeySpecifier.pos;
    }
    // A map-constrained table cannot declare keys (except within lang libraries).
    if (constraintType.tag == TypeTags.MAP &&
            (tableType.fieldNameList != null || tableType.keyTypeConstraint != null) &&
            !tableType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) {
        dlog.error(tableType.keyPos,
                DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
        resultType = symTable.semanticError;
        return;
    }
    markParameterizedType(tableType, constraintType);
    tableTypeNode.tableType = tableType;
    resultType = tableType;
}
/**
 * Resolves a finite type node into a {@code BFiniteType}, normalising each value-space literal's
 * type to the canonical type for its tag before adding it.
 */
public void visit(BLangFiniteTypeNode finiteTypeNode) {
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE,
            Flags.asMask(EnumSet.noneOf(Flag.class)), Names.EMPTY,
            env.enclPkg.symbol.pkgID, null, env.scope.owner,
            finiteTypeNode.pos, SOURCE);
    BFiniteType finiteType = new BFiniteType(finiteTypeSymbol);
    for (BLangExpression valueExpr : finiteTypeNode.valueSpace) {
        BLangLiteral literal = (BLangLiteral) valueExpr;
        literal.setBType(symTable.getTypeFromTag(literal.getBType().tag));
        finiteType.addValue(literal);
    }
    finiteTypeSymbol.type = finiteType;
    resultType = finiteType;
}
/**
 * Resolves a tuple type node into a {@code BTupleType}, including its rest type when declared.
 * Resolution short-circuits to {@code noType} if any member or the rest type fails.
 */
public void visit(BLangTupleTypeNode tupleTypeNode) {
    List<BType> resolvedMemberTypes = new ArrayList<>();
    for (BLangType memberTypeNode : tupleTypeNode.memberTypeNodes) {
        BType memberType = resolveTypeNode(memberTypeNode, env);
        if (memberType == symTable.noType) {
            resultType = symTable.noType;
            return;
        }
        resolvedMemberTypes.add(memberType);
    }
    BTypeSymbol tupleTypeSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
            Names.EMPTY, env.enclPkg.symbol.pkgID, null,
            env.scope.owner, tupleTypeNode.pos, SOURCE);
    BTupleType tupleType = new BTupleType(tupleTypeSymbol, resolvedMemberTypes);
    tupleTypeSymbol.type = tupleType;
    if (tupleTypeNode.restParamType != null) {
        BType restType = resolveTypeNode(tupleTypeNode.restParamType, env);
        if (restType == symTable.noType) {
            resultType = symTable.noType;
            return;
        }
        tupleType.restType = restType;
        markParameterizedType(tupleType, tupleType.restType);
    }
    markParameterizedType(tupleType, resolvedMemberTypes);
    resultType = tupleType;
}
/**
 * Resolves an error type node. A plain, non-distinct {@code error} with the default detail type
 * (outside the annotations module) reuses the shared {@code symTable.errorType}; otherwise a new
 * {@code BErrorType} with its own symbol is created, and anonymous definitions in non-package
 * scopes are entered into the symbol space with a generated name.
 */
public void visit(BLangErrorType errorTypeNode) {
    BType detailType = Optional.ofNullable(errorTypeNode.detailType)
            .map(bLangType -> resolveTypeNode(bLangType, env)).orElse(symTable.detailType);
    if (errorTypeNode.isAnonymous) {
        errorTypeNode.flagSet.add(Flag.PUBLIC);
        errorTypeNode.flagSet.add(Flag.ANONYMOUS);
    }
    boolean distinctErrorDef = errorTypeNode.flagSet.contains(Flag.DISTINCT);
    // Fast path: default detail, not distinct, not in the annotations module itself.
    if (detailType == symTable.detailType && !distinctErrorDef &&
            !this.env.enclPkg.packageID.equals(PackageID.ANNOTATIONS)) {
        resultType = symTable.errorType;
        return;
    }
    BErrorTypeSymbol errorTypeSymbol = Symbols
            .createErrorSymbol(Flags.asMask(errorTypeNode.flagSet), Names.EMPTY, env.enclPkg.symbol.pkgID,
                    null, env.scope.owner, errorTypeNode.pos, SOURCE);
    // Non-package scopes get a generated anonymous name and an eager symbol definition.
    if (env.node.getKind() != NodeKind.PACKAGE) {
        errorTypeSymbol.name = names.fromString(
                anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
        symbolEnter.defineSymbol(errorTypeNode.pos, errorTypeSymbol, env);
    }
    BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
    errorType.flags |= errorTypeSymbol.flags;
    errorTypeSymbol.type = errorType;
    markParameterizedType(errorType, detailType);
    errorType.typeIdSet = BTypeIdSet.emptySet();
    resultType = errorType;
}
/**
 * Resolves a constrained type node ({@code future<T>}, {@code map<T>}, {@code typedesc<T>},
 * {@code xml<T>}) into the corresponding constrained {@code BType}. XML constraints are
 * validated to be XML subtypes (unwrapping a parameterized typedesc constraint first).
 */
public void visit(BLangConstrainedType constrainedTypeNode) {
    BType type = resolveTypeNode(constrainedTypeNode.type, env);
    BType constraintType = resolveTypeNode(constrainedTypeNode.constraint, env);
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }
    BType constrainedType = null;
    if (type.tag == TypeTags.FUTURE) {
        constrainedType = new BFutureType(TypeTags.FUTURE, constraintType, null);
    } else if (type.tag == TypeTags.MAP) {
        constrainedType = new BMapType(TypeTags.MAP, constraintType, null);
    } else if (type.tag == TypeTags.TYPEDESC) {
        constrainedType = new BTypedescType(constraintType, null);
    } else if (type.tag == TypeTags.XML) {
        if (constraintType.tag == TypeTags.PARAMETERIZED_TYPE) {
            // Parameterized constraint: validate the typedesc parameter's constraint instead.
            BType typedescType = ((BParameterizedType) constraintType).paramSymbol.type;
            BType typedescConstraint = ((BTypedescType) typedescType).constraint;
            validateXMLConstraintType(typedescConstraint, constrainedTypeNode.pos);
        } else {
            validateXMLConstraintType(constraintType, constrainedTypeNode.pos);
        }
        constrainedType = new BXMLType(constraintType, null);
    } else {
        // NOTE(review): for any other keyword type this returns early, leaving resultType as
        // the resolved constraint from above — confirm that fall-through is intentional.
        return;
    }
    BTypeSymbol typeSymbol = type.tsymbol;
    constrainedType.tsymbol = Symbols.createTypeSymbol(typeSymbol.tag, typeSymbol.flags, typeSymbol.name,
            typeSymbol.originalName, typeSymbol.pkgID, constrainedType,
            typeSymbol.owner, constrainedTypeNode.pos, SOURCE);
    markParameterizedType(constrainedType, constraintType);
    resultType = constrainedType;
}
/**
 * Validates that an {@code xml<T>} constraint type is an XML subtype (or {@code never});
 * a union constraint is validated member-by-member. Logs a diagnostic on violation.
 *
 * @param constraintType resolved constraint type to validate
 * @param pos            position used for error reporting
 */
private void validateXMLConstraintType(BType constraintType, Location pos) {
    int constraintTag = constraintType.tag;
    if (constraintTag == TypeTags.UNION) {
        checkUnionTypeForXMLSubTypes((BUnionType) constraintType, pos);
    } else if (!TypeTags.isXMLTypeTag(constraintTag) && constraintTag != TypeTags.NEVER) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CONSTRAINT, symTable.xmlType, constraintType);
    }
}
/**
 * Validates that every member of an {@code xml<T>} union constraint is an XML subtype,
 * recursing into nested unions. Logs one diagnostic per offending member.
 *
 * @param constraintUnionType union constraint whose members are validated
 * @param pos                 position used for error reporting
 */
private void checkUnionTypeForXMLSubTypes(BUnionType constraintUnionType, Location pos) {
    for (BType memberType : constraintUnionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.UNION) {
            // Validate the nested union's members. The union wrapper itself can never pass
            // the XML-tag check below, so skip it to avoid a spurious diagnostic for a
            // nested union whose members are all valid XML subtypes.
            checkUnionTypeForXMLSubTypes((BUnionType) memberType, pos);
            continue;
        }
        if (!TypeTags.isXMLTypeTag(memberType.tag)) {
            dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CONSTRAINT, symTable.xmlType,
                    constraintUnionType);
        }
    }
}
/**
 * Resolves a user-defined type reference. The lookup order is: (1) the annotation space when the
 * enclosing scope owner is an annotation; (2) the main space, accepting type symbols directly,
 * or — for a typedesc-typed parameter referenced as a return type of an external function —
 * synthesising a parameterized type; (3) the root scope. Failures produce {@code semanticError}
 * (with a diagnostic unless the node stems from missing/unknown-type recovery).
 */
public void visit(BLangUserDefinedType userDefinedTypeNode) {
    Name pkgAlias = names.fromIdNode(userDefinedTypeNode.pkgAlias);
    Name typeName = names.fromIdNode(userDefinedTypeNode.typeName);
    BSymbol symbol = symTable.notFoundSymbol;
    // 1) Annotation attachments resolve their type in the annotation space first.
    if (env.scope.owner.tag == SymTag.ANNOTATION) {
        symbol = lookupAnnotationSpaceSymbolInPackage(userDefinedTypeNode.pos, env, pkgAlias, typeName);
    }
    // 2) Main-space lookup: either a proper type symbol, or a dependently-typed function's
    //    typedesc parameter used as its return type.
    if (symbol == symTable.notFoundSymbol) {
        BSymbol tempSymbol = lookupMainSpaceSymbolInPackage(userDefinedTypeNode.pos, env, pkgAlias, typeName);
        if ((tempSymbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symbol = tempSymbol;
        } else if (Symbols.isTagOn(tempSymbol, SymTag.VARIABLE) && env.node.getKind() == NodeKind.FUNCTION) {
            BLangFunction func = (BLangFunction) env.node;
            boolean errored = false;
            // Dependently-typed returns are only allowed on external functions.
            if (func.returnTypeNode == null ||
                    (func.hasBody() && func.body.getKind() != NodeKind.EXTERN_FUNCTION_BODY)) {
                dlog.error(userDefinedTypeNode.pos,
                        DiagnosticErrorCode.INVALID_NON_EXTERNAL_DEPENDENTLY_TYPED_FUNCTION);
                errored = true;
            }
            // The referenced parameter must be a typedesc.
            if (tempSymbol.type != null && tempSymbol.type.tag != TypeTags.TYPEDESC) {
                dlog.error(userDefinedTypeNode.pos, DiagnosticErrorCode.INVALID_PARAM_TYPE_FOR_RETURN_TYPE,
                        tempSymbol.type);
                errored = true;
            }
            if (errored) {
                this.resultType = symTable.semanticError;
                return;
            }
            ParameterizedTypeInfo parameterizedTypeInfo =
                    getTypedescParamValueType(func.requiredParams, tempSymbol);
            BType paramValType = parameterizedTypeInfo == null ? null : parameterizedTypeInfo.paramValueType;
            if (paramValType == symTable.semanticError) {
                this.resultType = symTable.semanticError;
                return;
            }
            if (paramValType != null) {
                // Synthesise a parameterized type bound to the typedesc parameter.
                BTypeSymbol tSymbol = new BTypeSymbol(SymTag.TYPE, Flags.PARAMETERIZED | tempSymbol.flags,
                        tempSymbol.name, tempSymbol.originalName, tempSymbol.pkgID,
                        null, func.symbol, tempSymbol.pos, VIRTUAL);
                tSymbol.type = new BParameterizedType(paramValType, (BVarSymbol) tempSymbol,
                        tSymbol, tempSymbol.name, parameterizedTypeInfo.index);
                tSymbol.type.flags |= Flags.PARAMETERIZED;
                this.resultType = tSymbol.type;
                userDefinedTypeNode.symbol = tSymbol;
                return;
            }
        }
    }
    // 3) Last resort: the root scope (built-in names).
    if (symbol == symTable.notFoundSymbol) {
        symbol = lookupMemberSymbol(userDefinedTypeNode.pos, symTable.rootScope, this.env, typeName,
                SymTag.VARIABLE_NAME);
    }
    if (this.env.logErrors && symbol == symTable.notFoundSymbol) {
        // Suppress the diagnostic for synthetic/missing nodes and known-unknown type refs.
        if (!missingNodesHelper.isMissingNode(pkgAlias) && !missingNodesHelper.isMissingNode(typeName) &&
                !symbolEnter.isUnknownTypeRef(userDefinedTypeNode)) {
            dlog.error(userDefinedTypeNode.pos, diagCode, typeName);
        }
        resultType = symTable.semanticError;
        return;
    }
    userDefinedTypeNode.symbol = symbol;
    resultType = symbol.type;
}
/**
 * Finds the typedesc parameter matching {@code varSym} among {@code params} and determines the
 * value type it parameterizes over, based on the parameter's default expression:
 * none/infer → the typedesc's constraint; a typedesc expression → its resolved type node;
 * a simple var ref → the referenced symbol's type. Any other default is an error.
 *
 * @param params required parameters of the enclosing function
 * @param varSym symbol of the typedesc parameter being referenced
 * @return the parameterized type info (possibly carrying {@code semanticError}), or
 *         {@code null} when no parameter matches
 */
private ParameterizedTypeInfo getTypedescParamValueType(List<BLangSimpleVariable> params, BSymbol varSym) {
    for (int i = 0; i < params.size(); i++) {
        BLangSimpleVariable param = params.get(i);
        if (param.name.value.equals(varSym.name.value)) {
            // No default, or an inferred typedesc default: use the typedesc's constraint.
            if (param.expr == null || param.expr.getKind() == NodeKind.INFER_TYPEDESC_EXPR) {
                return new ParameterizedTypeInfo(((BTypedescType) varSym.type).constraint, i);
            }
            NodeKind defaultValueExprKind = param.expr.getKind();
            if (defaultValueExprKind == NodeKind.TYPEDESC_EXPRESSION) {
                return new ParameterizedTypeInfo(
                        resolveTypeNode(((BLangTypedescExpr) param.expr).typeNode, this.env), i);
            }
            if (defaultValueExprKind == NodeKind.SIMPLE_VARIABLE_REF) {
                Name varName = names.fromIdNode(((BLangSimpleVarRef) param.expr).variableName);
                BSymbol typeRefSym = lookupSymbolInMainSpace(this.env, varName);
                if (typeRefSym != symTable.notFoundSymbol) {
                    return new ParameterizedTypeInfo(typeRefSym.type, i);
                }
                return new ParameterizedTypeInfo(symTable.semanticError);
            }
            // Any other default expression kind is invalid for a typedesc parameter.
            dlog.error(param.pos, DiagnosticErrorCode.INVALID_TYPEDESC_PARAM);
            return new ParameterizedTypeInfo(symTable.semanticError);
        }
    }
    return null;
}
/**
 * Resolves a function type node into a BInvokableType, validating any inferred
 * typedesc parameter defaults against the return type.
 */
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    Location pos = functionTypeNode.pos;
    List<BLangVariable> params = functionTypeNode.getParams();
    BLangType returnTypeNode = functionTypeNode.returnTypeNode;
    BType invokableType = createInvokableType(params, functionTypeNode.restParam, returnTypeNode,
            Flags.asMask(functionTypeNode.flagSet), env, pos);
    BType retType = returnTypeNode == null ? null : returnTypeNode.getBType();
    // An invalid inferred-typedesc default invalidates the whole function type.
    resultType = validateInferTypedescParams(pos, params, retType) ? invokableType : symTable.semanticError;
}
/**
 * Builds a BInvokableType (with its backing type symbol) from parameter, rest-parameter and
 * return type nodes.
 *
 * @param paramVars    parameter variables of the function type
 * @param restVariable rest parameter, or null
 * @param retTypeVar   return type node, or null
 * @param flags        flag mask of the function type
 * @param env          current symbol environment
 * @param location     location used for the created type symbol
 * @return the invokable type, or symTable.noType if any constituent resolves to noType
 */
public BType createInvokableType(List<? extends BLangVariable> paramVars,
                                 BLangVariable restVariable,
                                 BLangType retTypeVar,
                                 long flags,
                                 SymbolEnv env,
                                 Location location) {
    List<BType> paramTypes = new ArrayList<>();
    List<BVarSymbol> params = new ArrayList<>();
    boolean foundDefaultableParam = false;
    List<String> paramNames = new ArrayList<>();
    // The `function` (any-function) type carries no signature: params/restParam/returnType stay null.
    if (Symbols.isFlagOn(flags, Flags.ANY_FUNCTION)) {
        BInvokableType bInvokableType = new BInvokableType(null, null, null, null);
        bInvokableType.flags = flags;
        BInvokableTypeSymbol tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, flags,
                env.enclPkg.symbol.pkgID, bInvokableType,
                env.scope.owner, location, SOURCE);
        tsymbol.params = null;
        tsymbol.restParam = null;
        tsymbol.returnType = null;
        bInvokableType.tsymbol = tsymbol;
        return bInvokableType;
    }
    for (BLangVariable paramNode : paramVars) {
        BLangSimpleVariable param = (BLangSimpleVariable) paramNode;
        Name paramName = names.fromIdNode(param.name);
        Name paramOrigName = names.originalNameFromIdNode(param.name);
        // Reject duplicate parameter names (unnamed parameters are exempt).
        if (paramName != Names.EMPTY) {
            if (paramNames.contains(paramName.value)) {
                dlog.error(param.name.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, paramName.value);
            } else {
                paramNames.add(paramName.value);
            }
        }
        BType type = resolveTypeNode(param.getTypeNode(), env);
        // noType means the constituent could not be resolved yet; propagate it.
        if (type == symTable.noType) {
            return symTable.noType;
        }
        paramNode.setBType(type);
        paramTypes.add(type);
        long paramFlags = Flags.asMask(paramNode.flagSet);
        BVarSymbol symbol = new BVarSymbol(paramFlags, paramName, paramOrigName, env.enclPkg.symbol.pkgID,
                type, env.scope.owner, param.pos, SOURCE);
        param.symbol = symbol;
        if (param.expr != null) {
            // A parameter with a default expression is defaultable/optional.
            foundDefaultableParam = true;
            symbol.isDefaultable = true;
            symbol.flags |= Flags.OPTIONAL;
        } else if (foundDefaultableParam) {
            // Required parameters may not follow defaultable ones.
            dlog.error(param.pos, DiagnosticErrorCode.REQUIRED_PARAM_DEFINED_AFTER_DEFAULTABLE_PARAM);
        }
        params.add(symbol);
    }
    BType retType = resolveTypeNode(retTypeVar, env);
    if (retType == symTable.noType) {
        return symTable.noType;
    }
    BVarSymbol restParam = null;
    BType restType = null;
    if (restVariable != null) {
        restType = resolveTypeNode(restVariable.typeNode, env);
        if (restType == symTable.noType) {
            return symTable.noType;
        }
        BLangIdentifier id = ((BLangSimpleVariable) restVariable).name;
        restVariable.setBType(restType);
        restParam = new BVarSymbol(Flags.asMask(restVariable.flagSet),
                names.fromIdNode(id), names.originalNameFromIdNode(id),
                env.enclPkg.symbol.pkgID, restType, env.scope.owner, restVariable.pos, SOURCE);
    }
    BInvokableType bInvokableType = new BInvokableType(paramTypes, restType, retType, null);
    bInvokableType.flags = flags;
    BInvokableTypeSymbol tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, flags,
            env.enclPkg.symbol.pkgID, bInvokableType,
            env.scope.owner, location, SOURCE);
    tsymbol.params = params;
    tsymbol.restParam = restParam;
    tsymbol.returnType = retType;
    bInvokableType.tsymbol = tsymbol;
    // The function type is parameterized if any constituent (params, rest, return) is.
    List<BType> allConstituentTypes = new ArrayList<>(paramTypes);
    allConstituentTypes.add(restType);
    allConstituentTypes.add(retType);
    markParameterizedType(bInvokableType, allConstituentTypes);
    return bInvokableType;
}
/**
 * Collects every symbol visible from the given environment, walking outwards
 * through the enclosing environments. Inner declarations shadow outer ones.
 *
 * @param env Symbol environment
 * @return all the visible symbols, keyed by name
 */
public Map<Name, List<ScopeEntry>> getAllVisibleInScopeSymbols(SymbolEnv env) {
    Map<Name, List<ScopeEntry>> visibleEntries = new HashMap<>();
    // Innermost scope first: each name starts a fresh, mutable entry list.
    env.scope.entries.forEach((name, entry) -> {
        List<ScopeEntry> entries = new ArrayList<>();
        entries.add(entry);
        visibleEntries.put(name, entries);
    });
    if (env.enclEnv == null) {
        return visibleEntries;
    }
    // Merge symbols visible from the enclosing environment.
    getAllVisibleInScopeSymbols(env.enclEnv).forEach((name, outerEntries) -> {
        List<ScopeEntry> innerEntries = visibleEntries.get(name);
        if (innerEntries == null) {
            visibleEntries.put(name, outerEntries);
            return;
        }
        for (ScopeEntry outerEntry : outerEntries) {
            // Module-level variables are not merged into shadowed names.
            if (!innerEntries.contains(outerEntry) && !isModuleLevelVar(outerEntry.symbol)) {
                innerEntries.add(outerEntry);
            }
        }
    });
    return visibleEntries;
}
/**
 * Resolves the operator symbol for the equality operators (==, !=, ===, !==) over the
 * given operand types, provided a valid equality intersection exists between them.
 * May attach implicit casts to {@code any} on the operands of {@code binaryExpr}.
 *
 * @param opKind     equality operator kind
 * @param lhsType    left operand type
 * @param rhsType    right operand type
 * @param binaryExpr the binary expression whose operands may be implicitly cast
 * @param env        current symbol environment
 * @return the operator symbol, or symTable.notFoundSymbol
 */
public BSymbol getBinaryEqualityForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType,
                                            BLangBinaryExpr binaryExpr, SymbolEnv env) {
    boolean validEqualityIntersectionExists;
    switch (opKind) {
        case EQUAL:
        case NOT_EQUAL:
            validEqualityIntersectionExists = types.validEqualityIntersectionExists(lhsType, rhsType);
            break;
        case REF_EQUAL:
        case REF_NOT_EQUAL:
            // Reference equality is valid only when a non-error type intersection exists.
            validEqualityIntersectionExists =
                    types.getTypeIntersection(Types.IntersectionContext.compilerInternalIntersectionTestContext(),
                            lhsType, rhsType, env) != symTable.semanticError;
            break;
        default:
            // Not an equality operator.
            return symTable.notFoundSymbol;
    }
    if (validEqualityIntersectionExists) {
        // Both value types or both reference types: use the operand types as-is.
        if ((!types.isValueType(lhsType) && !types.isValueType(rhsType)) ||
                (types.isValueType(lhsType) && types.isValueType(rhsType))) {
            return createEqualityOperator(opKind, lhsType, rhsType);
        } else {
            // Mixed value/reference operands: implicitly cast both sides to `any`,
            // and map ===/!== to ==/!= over the any-typed operands.
            types.setImplicitCastExpr(binaryExpr.rhsExpr, rhsType, symTable.anyType);
            types.setImplicitCastExpr(binaryExpr.lhsExpr, lhsType, symTable.anyType);
            switch (opKind) {
                case REF_EQUAL:
                    return createEqualityOperator(OperatorKind.EQUAL, symTable.anyType,
                            symTable.anyType);
                case REF_NOT_EQUAL:
                    return createEqualityOperator(OperatorKind.NOT_EQUAL, symTable.anyType,
                            symTable.anyType);
                default:
                    return createEqualityOperator(opKind, symTable.anyType, symTable.anyType);
            }
        }
    }
    return symTable.notFoundSymbol;
}
/**
 * Resolves the operator symbol for the shift operators (&lt;&lt;, &gt;&gt;, &gt;&gt;&gt;)
 * when both operands are valid integer types.
 *
 * @param opKind  shift operator kind
 * @param lhsType left operand type
 * @param rhsType right operand type
 * @return the operator symbol, or symTable.notFoundSymbol
 */
public BSymbol getBitwiseShiftOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    if (!isBinaryShiftOperator(opKind)) {
        return symTable.notFoundSymbol;
    }
    if (!types.validIntegerTypeExists(lhsType) || !types.validIntegerTypeExists(rhsType)) {
        return symTable.notFoundSymbol;
    }
    // `<<` always results in int; `>>`/`>>>` preserve an unsigned/byte LHS type, otherwise int.
    BType resultType = symTable.intType;
    if (opKind != OperatorKind.BITWISE_LEFT_SHIFT) {
        switch (lhsType.tag) {
            case TypeTags.UNSIGNED32_INT:
            case TypeTags.UNSIGNED16_INT:
            case TypeTags.UNSIGNED8_INT:
            case TypeTags.BYTE:
                resultType = lhsType;
                break;
        }
    }
    return createBinaryOperator(opKind, lhsType, rhsType, resultType);
}
/**
 * Resolves the operator symbol for the arithmetic operators (+, -, /, *, %).
 * `+` additionally permits string/xml operands.
 *
 * @param opKind  arithmetic operator kind
 * @param lhsType left operand type
 * @param rhsType right operand type
 * @return the operator symbol, or symTable.notFoundSymbol
 */
public BSymbol getArithmeticOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    if (!isArithmeticOperator(opKind)) {
        return symTable.notFoundSymbol;
    }
    boolean bothOperandsValid = types.validNumericTypeExists(lhsType) && types.validNumericTypeExists(rhsType);
    if (opKind == OperatorKind.ADD && !bothOperandsValid) {
        // `+` is also concatenation over string/xml operands.
        bothOperandsValid = types.validStringOrXmlTypeExists(lhsType) && types.validStringOrXmlTypeExists(rhsType);
    }
    if (!bothOperandsValid) {
        return symTable.notFoundSymbol;
    }
    BType lhsCompatible = types.findCompatibleType(lhsType);
    BType rhsCompatible = types.findCompatibleType(rhsType);
    // Mixed basic numeric operand kinds are rejected.
    if (types.isBasicNumericType(lhsCompatible) && lhsCompatible != rhsCompatible) {
        return symTable.notFoundSymbol;
    }
    // The operator's result type is the compatible type with the larger tag.
    BType resultType = lhsCompatible.tag < rhsCompatible.tag ? rhsCompatible : lhsCompatible;
    return createBinaryOperator(opKind, lhsType, rhsType, resultType);
}
/**
 * Resolves the operator symbol for the binary bitwise operators (&amp;, |, ^)
 * when both operands are valid integer types.
 *
 * @param opKind  bitwise operator kind
 * @param lhsType left operand type
 * @param rhsType right operand type
 * @return the operator symbol, or symTable.notFoundSymbol
 */
public BSymbol getBinaryBitwiseOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    if (opKind != OperatorKind.BITWISE_AND && opKind != OperatorKind.BITWISE_OR
            && opKind != OperatorKind.BITWISE_XOR) {
        return symTable.notFoundSymbol;
    }
    if (!types.validIntegerTypeExists(lhsType) || !types.validIntegerTypeExists(rhsType)) {
        return symTable.notFoundSymbol;
    }
    // `&` narrows to an unsigned/byte operand type when one is present (LHS preferred);
    // `|` and `^` always result in int.
    if (opKind == OperatorKind.BITWISE_AND) {
        if (isUnsignedIntOrByteTag(lhsType.tag)) {
            return createBinaryOperator(opKind, lhsType, rhsType, lhsType);
        }
        if (isUnsignedIntOrByteTag(rhsType.tag)) {
            return createBinaryOperator(opKind, lhsType, rhsType, rhsType);
        }
    }
    return createBinaryOperator(opKind, lhsType, rhsType, symTable.intType);
}

// True for the int subtypes that keep their narrower type as the result of `&`.
private static boolean isUnsignedIntOrByteTag(int tag) {
    switch (tag) {
        case TypeTags.UNSIGNED8_INT:
        case TypeTags.BYTE:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED32_INT:
            return true;
        default:
            return false;
    }
}
/**
 * Define binary comparison operator for valid ordered types.
 *
 * @param opKind Binary operator kind
 * @param lhsType Type of the left hand side value
 * @param rhsType Type of the right hand side value
 * @return the &lt;, &lt;=, &gt;, or &gt;= operator symbol, or symTable.notFoundSymbol
 */
public BSymbol getBinaryComparisonOpForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    if (!isBinaryComparisonOperator(opKind)) {
        return symTable.notFoundSymbol;
    }
    // Both operands must be ordered types sharing the same ordering.
    boolean comparable = types.isOrderedType(lhsType, false)
            && types.isOrderedType(rhsType, false)
            && types.isSameOrderedType(lhsType, rhsType);
    if (!comparable) {
        return symTable.notFoundSymbol;
    }
    return createBinaryComparisonOperator(opKind, lhsType, rhsType);
}
/** Returns true for the three shift operators: &lt;&lt;, &gt;&gt;, &gt;&gt;&gt;. */
public boolean isBinaryShiftOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/** Returns true for the arithmetic operators: +, -, /, *, %. */
public boolean isArithmeticOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case ADD:
        case SUB:
        case DIV:
        case MUL:
        case MOD:
            return true;
        default:
            return false;
    }
}
/** Returns true for the comparison operators: &lt;, &lt;=, &gt;, &gt;=. */
public boolean isBinaryComparisonOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case LESS_THAN:
        case LESS_EQUAL:
        case GREATER_THAN:
        case GREATER_EQUAL:
            return true;
        default:
            return false;
    }
}
/**
 * Marks {@code type} (and its symbol) as parameterized when the constituent type is.
 *
 * @return true if the parameterized flag was propagated
 */
public boolean markParameterizedType(BType type, BType constituentType) {
    if (!Symbols.isFlagOn(constituentType.flags, Flags.PARAMETERIZED)) {
        return false;
    }
    type.tsymbol.flags |= Flags.PARAMETERIZED;
    type.flags |= Flags.PARAMETERIZED;
    return true;
}
/**
 * Marks the enclosing type as parameterized if any of its constituent types is.
 * Null constituents are ignored; the scan stops at the first parameterized one.
 */
public void markParameterizedType(BType enclosingType, Collection<BType> constituentTypes) {
    // Already flagged: nothing to propagate.
    if (Symbols.isFlagOn(enclosingType.flags, Flags.PARAMETERIZED)) {
        return;
    }
    for (BType memberType : constituentTypes) {
        if (memberType != null && markParameterizedType(enclosingType, memberType)) {
            return;
        }
    }
}
/**
 * Walks the hash-chain of operator symbol entries looking for one whose parameter
 * type tags match the given operand types exactly, position by position.
 *
 * @param entry first scope entry of the chain (may be NOT_FOUND_ENTRY)
 * @param types operand types to match, in order
 * @return the matching operator symbol, or symTable.notFoundSymbol
 */
private BSymbol resolveOperator(ScopeEntry entry, List<BType> types) {
    while (entry != NOT_FOUND_ENTRY) {
        BInvokableType opType = (BInvokableType) entry.symbol.type;
        if (types.size() == opType.paramTypes.size()) {
            boolean match = true;
            // Stop comparing at the first mismatching tag instead of scanning the full list.
            for (int i = 0; i < types.size() && match; i++) {
                if (types.get(i).tag != opType.paramTypes.get(i).tag) {
                    match = false;
                }
            }
            if (match) {
                return entry.symbol;
            }
        }
        entry = entry.next;
    }
    return symTable.notFoundSymbol;
}
/**
 * Resolves a built-in type node by looking up the type's symbol in the root scope
 * and logging an error when it is missing.
 */
private void visitBuiltInTypeNode(BLangType typeNode, TypeKind typeKind, SymbolEnv env) {
    Name builtInTypeName = names.fromTypeKind(typeKind);
    BSymbol builtInTypeSymbol = lookupMemberSymbol(typeNode.pos, symTable.rootScope,
            env, builtInTypeName, SymTag.TYPE);
    if (builtInTypeSymbol == symTable.notFoundSymbol) {
        dlog.error(typeNode.pos, diagCode, builtInTypeName);
    }
    // Even on a lookup failure the (not-found) symbol's type is propagated.
    typeNode.setBType(builtInTypeSymbol.type);
    resultType = builtInTypeSymbol.type;
}
/**
 * Collects all XML namespace symbols visible from the given environment outwards.
 * Inner declarations win over outer ones with the same name.
 */
private void addNamespacesInScope(Map<Name, BXMLNSSymbol> namespaces, SymbolEnv env) {
    for (SymbolEnv currentEnv = env; currentEnv != null; currentEnv = currentEnv.enclEnv) {
        currentEnv.scope.entries.forEach((name, scopeEntry) -> {
            if (scopeEntry.symbol.kind == SymbolKind.XMLNS) {
                // putIfAbsent keeps the innermost (already collected) binding.
                namespaces.putIfAbsent(name, (BXMLNSSymbol) scopeEntry.symbol);
            }
        });
    }
}
/**
 * Decides whether the given member symbol is accessible from the environment:
 * public members always; non-private members within the same package; private
 * members only from the owning type (or an enclosing invokable of that type).
 */
private boolean isMemberAccessAllowed(SymbolEnv env, BSymbol symbol) {
    if (Symbols.isPublic(symbol)) {
        return true;
    }
    if (!Symbols.isPrivate(symbol)) {
        // Module-level visibility: accessible within the defining package.
        return env.enclPkg.symbol.pkgID == symbol.pkgID;
    }
    return env.enclType != null
            ? env.enclType.getBType().tsymbol == symbol.owner
            : isMemberAllowed(env, symbol);
}
/**
 * Returns true if any enclosing invokable's receiver belongs to the type that owns
 * the given symbol, i.e. private access happens from within a method of the owner.
 */
private boolean isMemberAllowed(SymbolEnv env, BSymbol symbol) {
    for (SymbolEnv currentEnv = env; currentEnv != null; currentEnv = currentEnv.enclEnv) {
        if (currentEnv.enclInvokable != null
                && currentEnv.enclInvokable.symbol.receiverSymbol != null
                && currentEnv.enclInvokable.symbol.receiverSymbol.type.tsymbol == symbol.owner) {
            return true;
        }
    }
    return false;
}
/**
 * Resolves the type of an intersection type node (`A & B & ...`). Only two flavors are
 * supported: intersections containing `readonly`, and intersections of error types.
 *
 * @param intersectionTypeNode the intersection type node to resolve
 * @return the effective intersection type; symTable.noType if a constituent is unresolved;
 *         symTable.semanticError on invalid/unsupported intersections
 */
private BType computeIntersectionType(BLangIntersectionTypeNode intersectionTypeNode) {
    List<BLangType> constituentTypeNodes = intersectionTypeNode.constituentTypeNodes;
    Map<BType, BLangType> typeBLangTypeMap = new HashMap<>();
    boolean validIntersection = true;
    boolean isErrorIntersection = false;
    // True when the running intersection is one of the constituent types itself.
    boolean isAlreadyExistingType = false;
    // The first two constituents are resolved unconditionally.
    BLangType bLangTypeOne = constituentTypeNodes.get(0);
    BType typeOne = resolveTypeNode(bLangTypeOne, env);
    if (typeOne == symTable.noType) {
        return symTable.noType;
    }
    typeBLangTypeMap.put(typeOne, bLangTypeOne);
    BLangType bLangTypeTwo = constituentTypeNodes.get(1);
    BType typeTwo = resolveTypeNode(bLangTypeTwo, env);
    if (typeTwo == symTable.noType) {
        return symTable.noType;
    }
    typeBLangTypeMap.put(typeTwo, bLangTypeTwo);
    boolean hasReadOnlyType = typeOne == symTable.readonlyType || typeTwo == symTable.readonlyType;
    if (typeOne.tag == TypeTags.ERROR || typeTwo.tag == TypeTags.ERROR) {
        isErrorIntersection = true;
    }
    // Anything other than a readonly or error intersection is unsupported.
    if (!(hasReadOnlyType || isErrorIntersection)) {
        dlog.error(intersectionTypeNode.pos,
                DiagnosticErrorCode.UNSUPPORTED_TYPE_INTERSECTION, intersectionTypeNode);
        return symTable.semanticError;
    }
    BType potentialIntersectionType = getPotentialIntersection(
            Types.IntersectionContext.from(dlog, bLangTypeOne.pos, bLangTypeTwo.pos),
            typeOne, typeTwo, this.env);
    if (typeOne == potentialIntersectionType || typeTwo == potentialIntersectionType) {
        isAlreadyExistingType = true;
    }
    LinkedHashSet<BType> constituentBTypes = new LinkedHashSet<>();
    constituentBTypes.add(typeOne);
    constituentBTypes.add(typeTwo);
    if (potentialIntersectionType == symTable.semanticError) {
        validIntersection = false;
    } else {
        // Fold the remaining constituents into the running intersection.
        for (int i = 2; i < constituentTypeNodes.size(); i++) {
            BLangType bLangType = constituentTypeNodes.get(i);
            BType type = resolveTypeNode(bLangType, env);
            if (type.tag == TypeTags.ERROR) {
                isErrorIntersection = true;
            }
            typeBLangTypeMap.put(type, bLangType);
            if (!hasReadOnlyType) {
                hasReadOnlyType = type == symTable.readonlyType;
            }
            if (type == symTable.noType) {
                return symTable.noType;
            }
            BType tempIntersectionType = getPotentialIntersection(
                    Types.IntersectionContext.from(dlog, bLangTypeOne.pos, bLangTypeTwo.pos),
                    potentialIntersectionType, type, this.env);
            if (tempIntersectionType == symTable.semanticError) {
                validIntersection = false;
                break;
            }
            if (type == tempIntersectionType) {
                potentialIntersectionType = type;
                isAlreadyExistingType = true;
            } else if (potentialIntersectionType != tempIntersectionType) {
                potentialIntersectionType = tempIntersectionType;
                isAlreadyExistingType = false;
            }
            constituentBTypes.add(type);
        }
    }
    if (!validIntersection) {
        dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_INTERSECTION_TYPE, intersectionTypeNode);
        return symTable.semanticError;
    }
    // Error intersections producing a genuinely new type get their own defined type
    // (including the detail record when it is not already a named type).
    if (isErrorIntersection && !isAlreadyExistingType) {
        BType detailType = ((BErrorType) potentialIntersectionType).detailType;
        boolean existingErrorDetailType = false;
        if (detailType.tsymbol != null) {
            BSymbol detailTypeSymbol = lookupSymbolInMainSpace(env, detailType.tsymbol.name);
            if (detailTypeSymbol != symTable.notFoundSymbol) {
                existingErrorDetailType = true;
            }
        }
        return defineIntersectionType((BErrorType) potentialIntersectionType, intersectionTypeNode.pos,
                constituentBTypes, existingErrorDetailType, env);
    }
    // Inherently immutable, or already-readonly non-object types, need no immutable clone.
    if (types.isInherentlyImmutableType(potentialIntersectionType) ||
            (Symbols.isFlagOn(potentialIntersectionType.flags, Flags.READONLY) &&
                    !types.isSubTypeOfBaseType(potentialIntersectionType, TypeTags.OBJECT))) {
        return potentialIntersectionType;
    }
    if (!types.isSelectivelyImmutableType(potentialIntersectionType, false)) {
        if (types.isSelectivelyImmutableType(potentialIntersectionType)) {
            // Selectively immutable only when objects are considered: invalid for readonly objects.
            dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_READONLY_OBJECT_INTERSECTION_TYPE);
        } else {
            dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_READONLY_INTERSECTION_TYPE,
                    potentialIntersectionType);
        }
        return symTable.semanticError;
    }
    // Carry the flags of the original type node (when known) into the immutable clone.
    BLangType typeNode = typeBLangTypeMap.get(potentialIntersectionType);
    Set<Flag> flagSet;
    if (typeNode == null) {
        flagSet = new HashSet<>();
    } else if (typeNode.getKind() == NodeKind.OBJECT_TYPE) {
        flagSet = ((BLangObjectTypeNode) typeNode).flagSet;
    } else if (typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
        flagSet = ((BLangUserDefinedType) typeNode).flagSet;
    } else {
        flagSet = new HashSet<>();
    }
    return ImmutableTypeCloner.getImmutableIntersectionType(intersectionTypeNode.pos, types,
                                                            (SelectivelyImmutableReferenceType)
                                                                    potentialIntersectionType,
                                                            env, symTable, anonymousModelHelper, names, flagSet);
}
/**
 * Defines an intersection type for the given effective error type, first defining the
 * error's detail record when it is not already a named type.
 */
private BIntersectionType defineIntersectionType(BErrorType intersectionErrorType,
                                                 Location pos,
                                                 LinkedHashSet<BType> constituentBTypes,
                                                 boolean isAlreadyDefinedDetailType, SymbolEnv env) {
    SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);
    // An anonymous record detail type needs its own type definition in the package env.
    if (!isAlreadyDefinedDetailType && intersectionErrorType.detailType.tag == TypeTags.RECORD) {
        defineErrorDetailRecord((BRecordType) intersectionErrorType.detailType, pos, pkgEnv);
    }
    return defineErrorIntersectionType(intersectionErrorType, constituentBTypes,
            intersectionErrorType.tsymbol.pkgID, intersectionErrorType.tsymbol.owner);
}
/**
 * Defines a type definition (with an init function) for an error detail record type,
 * registering each field symbol in the record symbol's scope.
 */
private BLangTypeDefinition defineErrorDetailRecord(BRecordType detailRecord, Location pos, SymbolEnv env) {
    BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) detailRecord.tsymbol;
    for (BField field : detailRecord.fields.values()) {
        recordSymbol.scope.define(field.symbol.name, field.symbol);
    }
    BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(new ArrayList<>(),
            detailRecord, pos);
    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
    BLangTypeDefinition recordTypeDefinition = TypeDefBuilderHelper.addTypeDefinition(detailRecord,
                                                                                     recordSymbol,
                                                                                     recordTypeNode,
                                                                                     env);
    recordTypeDefinition.pos = pos;
    return recordTypeDefinition;
}
/**
 * Creates a public BIntersectionType (with a fresh type symbol) over the given
 * constituents, using the supplied effective type.
 */
private BIntersectionType defineErrorIntersectionType(IntersectableReferenceType effectiveType,
                                                      LinkedHashSet<BType> constituentBTypes, PackageID pkgId,
                                                      BSymbol owner) {
    BTypeSymbol typeSymbol = Symbols.createTypeSymbol(SymTag.INTERSECTION_TYPE,
                                                      Flags.asMask(EnumSet.of(Flag.PUBLIC)),
                                                      Names.EMPTY, pkgId, null, owner,
                                                      symTable.builtinPos, VIRTUAL);
    BIntersectionType definedIntersectionType = new BIntersectionType(typeSymbol, constituentBTypes,
                                                                      effectiveType);
    // Link the symbol back to the type it describes.
    typeSymbol.type = definedIntersectionType;
    return definedIntersectionType;
}
/**
 * Computes the intersection of two types, short-circuiting on `readonly`, which acts
 * as the identity: `T & readonly` yields T here (immutability is applied elsewhere).
 */
private BType getPotentialIntersection(Types.IntersectionContext intersectionContext,
                                       BType lhsType, BType rhsType, SymbolEnv env) {
    if (lhsType == symTable.readonlyType) {
        return rhsType;
    }
    return rhsType == symTable.readonlyType
            ? lhsType
            : types.getTypeIntersection(intersectionContext, lhsType, rhsType, env);
}
/**
 * Validates inferred typedesc parameter defaults (`= <>`): at most one is allowed,
 * and its parameter name must be referenced from the return type.
 *
 * @param pos        position used for the multiple-infer-params error
 * @param parameters the function's parameters
 * @param retType    the return type, or null
 * @return true when the parameters are valid
 */
boolean validateInferTypedescParams(Location pos, List<? extends BLangVariable> parameters, BType retType) {
    int inferTypedescParamCount = 0;
    BVarSymbol inferredTypedescParamSym = null;
    Location inferredDefaultPos = null;
    for (BLangVariable parameter : parameters) {
        BType paramType = parameter.getBType();
        BLangExpression defaultExpr = parameter.expr;
        boolean hasInferredDefault = paramType != null && paramType.tag == TypeTags.TYPEDESC
                && defaultExpr != null && defaultExpr.getKind() == NodeKind.INFER_TYPEDESC_EXPR;
        if (hasInferredDefault) {
            inferredTypedescParamSym = parameter.symbol;
            inferredDefaultPos = defaultExpr.pos;
            inferTypedescParamCount++;
        }
    }
    if (inferTypedescParamCount > 1) {
        dlog.error(pos, DiagnosticErrorCode.MULTIPLE_INFER_TYPEDESC_PARAMS);
        return false;
    }
    if (inferredTypedescParamSym == null) {
        // No inferred default present: nothing further to validate.
        return true;
    }
    // The inferred parameter's name must appear in the (non-null) return type.
    if (retType != null && unifier.refersInferableParamName(inferredTypedescParamSym.name.value, retType)) {
        return true;
    }
    dlog.error(inferredDefaultPos,
            DiagnosticErrorCode.CANNOT_USE_INFERRED_TYPEDESC_DEFAULT_WITH_UNREFERENCED_PARAM);
    return false;
}
/** True for a variable symbol owned directly by a package (module-level variable). */
private boolean isModuleLevelVar(BSymbol symbol) {
    boolean isVariable = symbol.getKind() == SymbolKind.VARIABLE;
    return isVariable && symbol.owner.getKind() == SymbolKind.PACKAGE;
}
/**
 * Collects all configurable variable symbols of the given package and of its
 * directly imported packages.
 *
 * @param packageSymbol the root package symbol
 * @return configurable variable symbols of the package and its imports
 */
public Set<BVarSymbol> getConfigVarSymbolsIncludingImportedModules(BPackageSymbol packageSymbol) {
    Set<BVarSymbol> configVars = new HashSet<>();
    populateConfigurableVars(packageSymbol, configVars);
    // Iterating an empty import list is a no-op, so no emptiness guard is needed.
    for (BPackageSymbol importSymbol : packageSymbol.imports) {
        populateConfigurableVars(importSymbol, configVars);
    }
    return configVars;
}
/**
 * Adds every configurable variable symbol found in the package's scope to the given set.
 */
private void populateConfigurableVars(BPackageSymbol pkgSymbol, Set<BVarSymbol> configVars) {
    pkgSymbol.scope.entries.values().forEach(entry -> {
        BSymbol symbol = entry.symbol;
        boolean isConfigurableVar = symbol != null && symbol.tag == SymTag.VARIABLE
                && Symbols.isFlagOn(symbol.flags, Flags.CONFIGURABLE);
        if (isConfigurableVar) {
            configVars.add((BVarSymbol) symbol);
        }
    });
}
/**
 * Holder for the resolved value type of a typedesc parameter and, when known,
 * that parameter's position in the parameter list.
 */
private static class ParameterizedTypeInfo {
    BType paramValueType;
    // -1 when the parameter position is unknown (e.g. on a semantic error).
    int index = -1;

    private ParameterizedTypeInfo(BType paramValueType) {
        this.paramValueType = paramValueType;
    }

    private ParameterizedTypeInfo(BType paramValueType, int index) {
        this(paramValueType);
        this.index = index;
    }
}
} |
the contains and add operations of hashset is time-consuming. | private void collectTableStatisticsAndCacheIcebergSplit(Table table, ScalarOperator predicate, long limit, Tracers tracers) {
IcebergTable icebergTable = (IcebergTable) table;
Optional<Snapshot> snapshot = icebergTable.getSnapshot();
if (!snapshot.isPresent()) {
return;
}
long snapshotId = snapshot.get().snapshotId();
String dbName = icebergTable.getRemoteDbName();
String tableName = icebergTable.getRemoteTableName();
IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate);
org.apache.iceberg.Table nativeTbl = icebergTable.getNativeTable();
Types.StructType schema = nativeTbl.schema().asStruct();
Map<String, MetricsModes.MetricsMode> fieldToMetricsMode = getIcebergMetricsConfig(icebergTable);
if (!fieldToMetricsMode.isEmpty()) {
Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".write_metrics_mode_default",
DEFAULT_WRITE_METRICS_MODE_DEFAULT);
Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".non-default.size",
String.valueOf(fieldToMetricsMode.size()));
Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".non-default.columns",
fieldToMetricsMode.toString());
}
List<ScalarOperator> scalarOperators = Utils.extractConjuncts(predicate);
ScalarOperatorToIcebergExpr.IcebergContext icebergContext = new ScalarOperatorToIcebergExpr.IcebergContext(schema);
Expression icebergPredicate = new ScalarOperatorToIcebergExpr().convert(scalarOperators, icebergContext);
TableScan scan = nativeTbl.newScan()
.useSnapshot(snapshotId)
.metricsReporter(metricsReporter)
.planWith(jobPlanningExecutor);
if (enableCollectColumnStatistics()) {
scan = scan.includeColumnStats();
}
if (icebergPredicate.op() != Expression.Operation.TRUE) {
scan = scan.filter(icebergPredicate);
}
CloseableIterable<FileScanTask> fileScanTaskIterable = TableScanUtil.splitFiles(
scan.planFiles(), scan.targetSplitSize());
CloseableIterator<FileScanTask> fileScanTaskIterator = fileScanTaskIterable.iterator();
Iterator<FileScanTask> fileScanTasks;
boolean canPruneManifests = limit != -1 && !icebergTable.isV2Format() && onlyHasPartitionPredicate(table, predicate)
&& limit < Integer.MAX_VALUE && nativeTbl.spec().specId() == 0 && enablePruneManifest();
if (canPruneManifests) {
fileScanTasks = Iterators.limit(fileScanTaskIterator, (int) limit);
} else {
fileScanTasks = fileScanTaskIterator;
}
List<FileScanTask> icebergScanTasks = Lists.newArrayList();
long totalReadCount = 0;
Set<String> filePaths = new HashSet<>();
while (fileScanTasks.hasNext()) {
FileScanTask scanTask = fileScanTasks.next();
FileScanTask icebergSplitScanTask = scanTask;
if (enableCollectColumnStatistics()) {
try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.buildSplitScanTask")) {
icebergSplitScanTask = buildIcebergSplitScanTask(scanTask, icebergPredicate, key);
}
List<Types.NestedField> fullColumns = nativeTbl.schema().columns();
Map<Integer, Type.PrimitiveType> idToTypeMapping = fullColumns.stream()
.filter(column -> column.type().isPrimitiveType())
.collect(Collectors.toMap(Types.NestedField::fieldId, column -> column.type().asPrimitiveType()));
Set<Integer> identityPartitionIds = nativeTbl.spec().fields().stream()
.filter(x -> x.transform().isIdentity())
.map(PartitionField::sourceId)
.collect(Collectors.toSet());
List<Types.NestedField> nonPartitionPrimitiveColumns = fullColumns.stream()
.filter(column -> !identityPartitionIds.contains(column.fieldId()) &&
column.type().isPrimitiveType())
.collect(toImmutableList());
try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.updateIcebergFileStats")) {
statisticProvider.updateIcebergFileStats(
icebergTable, scanTask, idToTypeMapping, nonPartitionPrimitiveColumns, key);
}
}
icebergScanTasks.add(icebergSplitScanTask);
if (canPruneManifests) {
String filePath = icebergSplitScanTask.file().path().toString();
if (!filePaths.contains(filePath)) {
filePaths.add(filePath);
totalReadCount += scanTask.file().recordCount();
}
if (totalReadCount >= limit) {
break;
}
}
}
try {
fileScanTaskIterable.close();
fileScanTaskIterator.close();
} catch (IOException e) {
}
Optional<ScanReport> metrics = metricsReporter.getReporter(catalogName, dbName, tableName, snapshotId, icebergPredicate);
if (metrics.isPresent()) {
Tracers.Module module = Tracers.Module.EXTERNAL;
String name = "ICEBERG.ScanMetrics." + metrics.get().tableName() + "[" + icebergPredicate + "]";
String value = metrics.get().scanMetrics().toString();
if (tracers == null) {
Tracers.record(module, name, value);
} else {
synchronized (this) {
Tracers.record(tracers, module, name, value);
}
}
}
splitTasks.put(key, icebergScanTasks);
scannedTables.add(key);
} | String filePath = icebergSplitScanTask.file().path().toString(); | private void collectTableStatisticsAndCacheIcebergSplit(Table table, ScalarOperator predicate, long limit, Tracers tracers) {
IcebergTable icebergTable = (IcebergTable) table;
Optional<Snapshot> snapshot = icebergTable.getSnapshot();
if (!snapshot.isPresent()) {
return;
}
long snapshotId = snapshot.get().snapshotId();
String dbName = icebergTable.getRemoteDbName();
String tableName = icebergTable.getRemoteTableName();
IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate);
org.apache.iceberg.Table nativeTbl = icebergTable.getNativeTable();
Types.StructType schema = nativeTbl.schema().asStruct();
Map<String, MetricsModes.MetricsMode> fieldToMetricsMode = getIcebergMetricsConfig(icebergTable);
if (!fieldToMetricsMode.isEmpty()) {
Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".write_metrics_mode_default",
DEFAULT_WRITE_METRICS_MODE_DEFAULT);
Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".non-default.size",
String.valueOf(fieldToMetricsMode.size()));
Tracers.record(Tracers.Module.EXTERNAL, "ICEBERG.MetricsConfig." + nativeTbl + ".non-default.columns",
fieldToMetricsMode.toString());
}
List<ScalarOperator> scalarOperators = Utils.extractConjuncts(predicate);
ScalarOperatorToIcebergExpr.IcebergContext icebergContext = new ScalarOperatorToIcebergExpr.IcebergContext(schema);
Expression icebergPredicate = new ScalarOperatorToIcebergExpr().convert(scalarOperators, icebergContext);
TableScan scan = nativeTbl.newScan()
.useSnapshot(snapshotId)
.metricsReporter(metricsReporter)
.planWith(jobPlanningExecutor);
if (enableCollectColumnStatistics()) {
scan = scan.includeColumnStats();
}
if (icebergPredicate.op() != Expression.Operation.TRUE) {
scan = scan.filter(icebergPredicate);
}
CloseableIterable<FileScanTask> fileScanTaskIterable = TableScanUtil.splitFiles(
scan.planFiles(), scan.targetSplitSize());
CloseableIterator<FileScanTask> fileScanTaskIterator = fileScanTaskIterable.iterator();
Iterator<FileScanTask> fileScanTasks;
boolean canPruneManifests = limit != -1 && !icebergTable.isV2Format() && onlyHasPartitionPredicate(table, predicate)
&& limit < Integer.MAX_VALUE && nativeTbl.spec().specId() == 0 && enablePruneManifest();
if (canPruneManifests) {
fileScanTasks = Iterators.limit(fileScanTaskIterator, (int) limit);
} else {
fileScanTasks = fileScanTaskIterator;
}
List<FileScanTask> icebergScanTasks = Lists.newArrayList();
long totalReadCount = 0;
Set<String> filePaths = new HashSet<>();
while (fileScanTasks.hasNext()) {
FileScanTask scanTask = fileScanTasks.next();
FileScanTask icebergSplitScanTask = scanTask;
if (enableCollectColumnStatistics()) {
try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.buildSplitScanTask")) {
icebergSplitScanTask = buildIcebergSplitScanTask(scanTask, icebergPredicate, key);
}
List<Types.NestedField> fullColumns = nativeTbl.schema().columns();
Map<Integer, Type.PrimitiveType> idToTypeMapping = fullColumns.stream()
.filter(column -> column.type().isPrimitiveType())
.collect(Collectors.toMap(Types.NestedField::fieldId, column -> column.type().asPrimitiveType()));
Set<Integer> identityPartitionIds = nativeTbl.spec().fields().stream()
.filter(x -> x.transform().isIdentity())
.map(PartitionField::sourceId)
.collect(Collectors.toSet());
List<Types.NestedField> nonPartitionPrimitiveColumns = fullColumns.stream()
.filter(column -> !identityPartitionIds.contains(column.fieldId()) &&
column.type().isPrimitiveType())
.collect(toImmutableList());
try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.updateIcebergFileStats")) {
statisticProvider.updateIcebergFileStats(
icebergTable, scanTask, idToTypeMapping, nonPartitionPrimitiveColumns, key);
}
}
icebergScanTasks.add(icebergSplitScanTask);
if (canPruneManifests) {
String filePath = icebergSplitScanTask.file().path().toString();
if (!filePaths.contains(filePath)) {
filePaths.add(filePath);
totalReadCount += scanTask.file().recordCount();
}
if (totalReadCount >= limit) {
break;
}
}
}
try {
fileScanTaskIterable.close();
fileScanTaskIterator.close();
} catch (IOException e) {
}
Optional<ScanReport> metrics = metricsReporter.getReporter(catalogName, dbName, tableName, snapshotId, icebergPredicate);
if (metrics.isPresent()) {
Tracers.Module module = Tracers.Module.EXTERNAL;
String name = "ICEBERG.ScanMetrics." + metrics.get().tableName() + "[" + icebergPredicate + "]";
String value = metrics.get().scanMetrics().toString();
if (tracers == null) {
Tracers.record(module, name, value);
} else {
synchronized (this) {
Tracers.record(tracers, module, name, value);
}
}
}
splitTasks.put(key, icebergScanTasks);
scannedTables.add(key);
} | class IcebergMetadata implements ConnectorMetadata {
private static final Logger LOG = LogManager.getLogger(IcebergMetadata.class);
// Table-property keys recognized when creating Iceberg tables.
public static final String LOCATION_PROPERTY = "location";
public static final String FILE_FORMAT = "file_format";
public static final String COMPRESSION_CODEC = "compression_codec";
public static final String COMMENT = "comment";
private final String catalogName;
private final HdfsEnvironment hdfsEnvironment;
private final IcebergCatalog icebergCatalog;
private final IcebergStatisticProvider statisticProvider = new IcebergStatisticProvider();
// Caches keyed per table/database; concurrent maps because planning may run on executors.
private final Map<TableIdentifier, Table> tables = new ConcurrentHashMap<>();
private final Map<String, Database> databases = new ConcurrentHashMap<>();
// Planned file scan tasks cached per filter (db, table, snapshot, predicate).
private final Map<IcebergFilter, List<FileScanTask>> splitTasks = new ConcurrentHashMap<>();
// NOTE(review): scannedTables is a plain HashSet while the other shared collections are
// concurrent — confirm all access is single-threaded or externally synchronized.
private final Set<IcebergFilter> scannedTables = new HashSet<>();
private final Set<IcebergFilter> preparedTables = ConcurrentHashMap.newKeySet();
private final Map<FileScanTaskSchema, Pair<String, String>> fileScanTaskSchemas = new ConcurrentHashMap<>();
// Executors for split planning and for refreshing metadata on other FE nodes.
private final ExecutorService jobPlanningExecutor;
private final ExecutorService refreshOtherFeExecutor;
private final IcebergMetricsReporter metricsReporter;
/**
 * Creates the connector metadata facade for one Iceberg catalog.
 *
 * @param catalogName           StarRocks-side catalog name
 * @param hdfsEnvironment       storage credentials/configuration provider
 * @param icebergCatalog        backing Iceberg catalog implementation
 * @param jobPlanningExecutor   executor for refresh/scan-planning work
 * @param refreshOtherFeExecutor executor for async cross-FE cache refresh
 */
public IcebergMetadata(String catalogName, HdfsEnvironment hdfsEnvironment, IcebergCatalog icebergCatalog,
ExecutorService jobPlanningExecutor, ExecutorService refreshOtherFeExecutor) {
this.catalogName = catalogName;
this.hdfsEnvironment = hdfsEnvironment;
this.icebergCatalog = icebergCatalog;
this.metricsReporter = new IcebergMetricsReporter();
this.jobPlanningExecutor = jobPlanningExecutor;
this.refreshOtherFeExecutor = refreshOtherFeExecutor;
}
@Override
// Lists all database names directly from the backing catalog (no caching).
public List<String> listDbNames() {
return icebergCatalog.listAllDatabases();
}
@Override
// Creates a database, rejecting duplicates. NOTE(review): the exists-check and
// the create are not atomic; a concurrent creator can still win the race.
public void createDb(String dbName, Map<String, String> properties) throws AlreadyExistsException {
if (dbExists(dbName)) {
throw new AlreadyExistsException("Database Already Exists");
}
icebergCatalog.createDb(dbName, properties);
}
@Override
public void dropDb(String dbName, boolean isForceDrop) throws MetaNotFoundException {
    // Refuse to drop a non-empty database; the Iceberg catalog does not cascade.
    // (isEmpty() instead of size() != 0 — same behavior, clearer intent.)
    if (!listTableNames(dbName).isEmpty()) {
        throw new StarRocksConnectorException("Database %s not empty", dbName);
    }
    icebergCatalog.dropDb(dbName);
    // Evict the cached entry so later getDb() calls go back to the catalog.
    databases.remove(dbName);
}
@Override
public Database getDb(String dbName) {
    // Single get() instead of containsKey()+get(): avoids a second hash probe and
    // a check-then-act window on the concurrent map (ConcurrentHashMap never
    // stores null values, so a null result always means "absent").
    Database cached = databases.get(dbName);
    if (cached != null) {
        return cached;
    }
    Database db;
    try {
        db = icebergCatalog.getDB(dbName);
    } catch (NoSuchNamespaceException e) {
        // Absent database is reported as null to the caller, matching ConnectorMetadata.
        LOG.error("Database {} not found", dbName, e);
        return null;
    }
    databases.put(dbName, db);
    return db;
}
@Override
// Lists table names in a database directly from the backing catalog.
public List<String> listTableNames(String dbName) {
return icebergCatalog.listTables(dbName);
}
@Override
// Creates an Iceberg table from a StarRocks CREATE TABLE statement.
// Only identity (list-style) partitioning is expressed via PartitionDesc here;
// the transform parsing happens in parsePartitionFields.
public boolean createTable(CreateTableStmt stmt) throws DdlException {
String dbName = stmt.getDbName();
String tableName = stmt.getTableName();
Schema schema = toIcebergApiSchema(stmt.getColumns());
PartitionDesc partitionDesc = stmt.getPartitionDesc();
// Unpartitioned table when no PARTITION BY clause is present.
List<String> partitionColNames = partitionDesc == null ? Lists.newArrayList() :
((ListPartitionDesc) partitionDesc).getPartitionColNames();
PartitionSpec partitionSpec = parsePartitionFields(schema, partitionColNames);
Map<String, String> properties = stmt.getProperties() == null ? new HashMap<>() : stmt.getProperties();
// Explicit "location" property wins over the catalog's default warehouse path.
String tableLocation = properties.get(LOCATION_PROPERTY);
properties.put(COMMENT, stmt.getComment());
Map<String, String> createTableProperties = IcebergApiConverter.rebuildCreateTableProperties(properties);
return icebergCatalog.createTable(dbName, tableName, schema, partitionSpec, tableLocation, createTableProperties);
}
@Override
public void alterTable(AlterTableStmt stmt) throws UserException {
    String dbName = stmt.getDbName();
    String tableName = stmt.getTableName();
    org.apache.iceberg.Table table = icebergCatalog.getTable(dbName, tableName);
    if (table == null) {
        throw new StarRocksConnectorException(
                "Failed to load iceberg table: " + stmt.getTbl().toString());
    }
    IcebergAlterTableExecutor executor = new IcebergAlterTableExecutor(stmt, table, icebergCatalog);
    executor.execute();
    // Serialize cache maintenance so the converted-table cache and the catalog
    // cache are refreshed together after the ALTER commits.
    synchronized (this) {
        tables.remove(TableIdentifier.of(dbName, tableName));
        try {
            icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor);
        } catch (Exception exception) {
            // Fix: attach the exception so refresh failures are diagnosable
            // (previously only the bare message was logged), consistent with
            // refreshTable()'s error handling.
            LOG.error("Failed to refresh caching iceberg table.", exception);
            icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName));
        }
        asyncRefreshOthersFeMetadataCache(dbName, tableName);
    }
}
@Override
public void dropTable(DropTableStmt stmt) {
    String dbName = stmt.getDbName();
    String tableName = stmt.getTableName();
    Table icebergTable = getTable(dbName, tableName);
    // Nothing to do when the table cannot be loaded (already gone).
    if (icebergTable == null) {
        return;
    }
    icebergCatalog.dropTable(dbName, tableName, stmt.isForceDrop());
    // Evict the local conversion cache, drop collected statistics, and ask
    // the other FEs to refresh their metadata caches.
    tables.remove(TableIdentifier.of(dbName, tableName));
    StatisticUtils.dropStatisticsAfterDropTable(icebergTable);
    asyncRefreshOthersFeMetadataCache(dbName, tableName);
}
@Override
public Table getTable(String dbName, String tblName) {
    TableIdentifier identifier = TableIdentifier.of(dbName, tblName);
    // Single get() instead of containsKey()+get(): one hash probe, and no
    // check-then-act window on the concurrent map.
    Table cached = tables.get(identifier);
    if (cached != null) {
        return cached;
    }
    try {
        IcebergCatalogType catalogType = icebergCatalog.getIcebergCatalogType();
        org.apache.iceberg.Table icebergTable = icebergCatalog.getTable(dbName, tblName);
        Table table = IcebergApiConverter.toIcebergTable(icebergTable, catalogName, dbName, tblName, catalogType.name());
        table.setComment(icebergTable.properties().getOrDefault(COMMENT, ""));
        tables.put(identifier, table);
        return table;
    } catch (StarRocksConnectorException | NoSuchTableException e) {
        // Absent/broken table is reported as null to the caller.
        LOG.error("Failed to get iceberg table {}", identifier, e);
        return null;
    }
}
@Override
// Existence check delegates straight to the catalog (bypasses the local cache).
public boolean tableExists(String dbName, String tblName) {
return icebergCatalog.tableExists(dbName, tblName);
}
@Override
public List<String> listPartitionNames(String dbName, String tblName) {
    IcebergCatalogType catalogType = icebergCatalog.getIcebergCatalogType();
    // Partition listing is only implemented for Hive-, REST- and Glue-backed catalogs.
    boolean supported = catalogType == HIVE_CATALOG
            || catalogType == REST_CATALOG
            || catalogType == GLUE_CATALOG;
    if (!supported) {
        throw new StarRocksConnectorException(
                "Do not support get partitions from catalog type: " + catalogType);
    }
    return icebergCatalog.listPartitionNames(dbName, tblName, jobPlanningExecutor);
}
@Override
// ConnectorMetadata entry point; partitionKeys and fieldNames are unused for
// Iceberg — planning is driven by snapshot + predicate + limit.
public List<RemoteFileInfo> getRemoteFileInfos(Table table, List<PartitionKey> partitionKeys,
long snapshotId, ScalarOperator predicate,
List<String> fieldNames, long limit) {
return getRemoteFileInfos((IcebergTable) table, snapshotId, predicate, limit);
}
// Returns the cached scan tasks for (db, table, snapshot, predicate), planning
// them first if this filter has not been processed yet.
private List<RemoteFileInfo> getRemoteFileInfos(IcebergTable table, long snapshotId,
                                                ScalarOperator predicate, long limit) {
    String dbName = table.getRemoteDbName();
    String tableName = table.getRemoteTableName();
    IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshotId, predicate);
    triggerIcebergPlanFilesIfNeeded(key, table, predicate, limit);

    List<FileScanTask> scanTasks = splitTasks.get(key);
    if (scanTasks == null) {
        throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]",
                dbName, tableName, predicate);
    }

    RemoteFileInfo remoteFileInfo = new RemoteFileInfo();
    List<RemoteFileDesc> remoteFileDescs =
            Lists.newArrayList(RemoteFileDesc.createIcebergRemoteFileDesc(scanTasks));
    remoteFileInfo.setFiles(remoteFileDescs);
    return Lists.newArrayList(remoteFileInfo);
}
@Override
// Builds PartitionInfo (last-updated timestamps) by scanning the Iceberg
// PARTITIONS metadata table. For an unpartitioned table the first row's
// timestamp is returned; otherwise results are keyed by partition name.
public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) {
Map<String, Partition> partitionMap = Maps.newHashMap();
IcebergTable icebergTable = (IcebergTable) table;
PartitionsTable partitionsTable = (PartitionsTable) MetadataTableUtils.
createMetadataTableInstance(icebergTable.getNativeTable(), MetadataTableType.PARTITIONS);
if (icebergTable.isUnPartitioned()) {
try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) {
for (FileScanTask task : tasks) {
CloseableIterable<StructLike> rows = task.asDataTask().rows();
for (StructLike row : rows) {
// Column 7 is presumably last_updated_at of the PARTITIONS metadata
// table for unpartitioned layouts — TODO confirm against the Iceberg
// PartitionsTable schema for the pinned Iceberg version.
long lastUpdated = row.get(7, Long.class);
Partition partition = new Partition(lastUpdated);
// Only the first row matters for an unpartitioned table.
return ImmutableList.of(partition);
}
}
// No rows at all: report an unknown (-1) modification time.
return ImmutableList.of(new Partition(-1));
} catch (IOException e) {
throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e);
}
} else {
try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) {
for (FileScanTask task : tasks) {
CloseableIterable<StructLike> rows = task.asDataTask().rows();
for (StructLike row : rows) {
// Columns 0/1/9: partition tuple, spec id, last_updated_at —
// positions assumed from the PARTITIONS table layout; TODO confirm.
StructProjection partitionData = row.get(0, StructProjection.class);
int specId = row.get(1, Integer.class);
long lastUpdated = row.get(9, Long.class);
PartitionSpec spec = icebergTable.getNativeTable().specs().get(specId);
Partition partition = new Partition(lastUpdated);
String partitionName =
PartitionUtil.convertIcebergPartitionToPartitionName(spec, partitionData);
partitionMap.put(partitionName, partition);
}
}
} catch (IOException e) {
throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e);
}
}
ImmutableList.Builder<PartitionInfo> partitions = ImmutableList.builder();
// NOTE(review): a requested name missing from partitionMap yields null, and
// ImmutableList.Builder#add rejects null — verify callers only pass names
// obtained from listPartitionNames().
partitionNames.forEach(partitionName -> partitions.add(partitionMap.get(partitionName)));
return partitions.build();
}
@Override
public boolean prepareMetadata(MetaPreparationItem item, Tracers tracers) {
    IcebergTable icebergTable = (IcebergTable) item.getTable();
    String dbName = icebergTable.getRemoteDbName();
    String tableName = icebergTable.getRemoteTableName();

    // A table without any snapshot has no data files to plan.
    Optional<Snapshot> snapshot = icebergTable.getSnapshot();
    if (snapshot.isEmpty()) {
        return true;
    }

    IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshot.get().snapshotId(), item.getPredicate());
    // add() returns false when another caller already claimed this filter;
    // in that case planning is (or will be) done elsewhere.
    if (!preparedTables.add(key)) {
        return true;
    }

    triggerIcebergPlanFilesIfNeeded(key, icebergTable, item.getPredicate(), item.getLimit(), tracers);
    return true;
}
// Convenience overload: plan files for this filter without an explicit Tracers.
private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate, long limit) {
triggerIcebergPlanFilesIfNeeded(key, table, predicate, limit, null);
}
// Runs split planning + statistics collection once per filter, timed under the
// EXTERNAL tracer scope. NOTE(review): scannedTables is a plain HashSet; if this
// can be reached from prepareMetadata()'s concurrent path, the contains() check
// is not thread-safe — confirm callers are serialized.
private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate,
long limit, Tracers tracers) {
if (!scannedTables.contains(key)) {
try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.processSplit." + key)) {
collectTableStatisticsAndCacheIcebergSplit(table, predicate, limit, tracers);
}
}
}
// Derives the distinct partition keys that survive predicate-based file pruning.
// Each cached FileScanTask contributes its file's partition tuple; duplicates
// and tuples from older (shorter) specs are skipped.
public List<PartitionKey> getPrunedPartitions(Table table, ScalarOperator predicate, long limit) {
IcebergTable icebergTable = (IcebergTable) table;
String dbName = icebergTable.getRemoteDbName();
String tableName = icebergTable.getRemoteTableName();
Optional<Snapshot> snapshot = icebergTable.getSnapshot();
if (!snapshot.isPresent()) {
// No snapshot: no data, no partitions.
return new ArrayList<>();
}
IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshot.get().snapshotId(), predicate);
triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit);
List<PartitionKey> partitionKeys = new ArrayList<>();
List<FileScanTask> icebergSplitTasks = splitTasks.get(key);
if (icebergSplitTasks == null) {
throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]",
dbName, tableName, predicate);
}
Set<List<String>> scannedPartitions = new HashSet<>();
PartitionSpec spec = icebergTable.getNativeTable().spec();
List<Column> partitionColumns = icebergTable.getPartitionColumnsIncludeTransformed();
for (FileScanTask fileScanTask : icebergSplitTasks) {
org.apache.iceberg.PartitionData partitionData = (org.apache.iceberg.PartitionData) fileScanTask.file().partition();
List<String> values = PartitionUtil.getIcebergPartitionValues(spec, partitionData);
if (values.size() != partitionColumns.size()) {
// File written under a different (evolved) spec; skip its tuple.
continue;
}
if (scannedPartitions.contains(values)) {
continue;
} else {
scannedPartitions.add(values);
}
try {
List<com.starrocks.catalog.Type> srTypes = new ArrayList<>();
for (PartitionField partitionField : spec.fields()) {
if (partitionField.transform().isVoid()) {
// Void transform fields carry no value after spec evolution.
continue;
}
if (!partitionField.transform().isIdentity()) {
// Transformed partitions keep the transform's result type;
// dates are represented as integers (days since epoch).
Type sourceType = spec.schema().findType(partitionField.sourceId());
Type resultType = partitionField.transform().getResultType(sourceType);
if (resultType == Types.DateType.get()) {
resultType = Types.IntegerType.get();
}
srTypes.add(fromIcebergType(resultType));
continue;
}
srTypes.add(icebergTable.getColumn(icebergTable.getPartitionSourceName(spec.schema(),
partitionField)).getType());
}
if (icebergTable.hasPartitionTransformedEvolution()) {
// After transform evolution, column types come from the table's
// partition-column view rather than from the current spec.
srTypes = partitionColumns.stream()
.map(Column::getType)
.collect(Collectors.toList());
}
partitionKeys.add(createPartitionKeyWithType(values, srTypes, table.getType()));
} catch (Exception e) {
LOG.error("create partition key failed.", e);
throw new StarRocksConnectorException(e.getMessage());
}
}
return partitionKeys;
}
/**
* To optimize the MetricsModes of the Iceberg tables, it's necessary to display the columns MetricsMode in the
* ICEBERG query profile.
* <br>
* None:
* <p>
* Under this mode, value_counts, null_value_counts, nan_value_counts, lower_bounds, upper_bounds
* are not persisted.
* </p>
* Counts:
* <p>
* Under this mode, only value_counts, null_value_counts, nan_value_counts are persisted.
* </p>
* Truncate:
* <p>
* Under this mode, value_counts, null_value_counts, nan_value_counts and truncated lower_bounds,
* upper_bounds are persisted.
* </p>
* Full:
* <p>
* Under this mode, value_counts, null_value_counts, nan_value_counts and full lower_bounds,
* upper_bounds are persisted.
* </p>
*/
public static Map<String, MetricsModes.MetricsMode> getIcebergMetricsConfig(IcebergTable table) {
    MetricsModes.MetricsMode defaultMode = MetricsModes.fromString(DEFAULT_WRITE_METRICS_MODE_DEFAULT);
    MetricsConfig metricsConfig = MetricsConfig.forTable(table.getNativeTable());
    // Collect only the columns whose metrics mode deviates from the default,
    // so the query profile stays compact.
    Map<String, MetricsModes.MetricsMode> nonDefaultModes = Maps.newHashMap();
    for (Types.NestedField column : table.getNativeTable().schema().columns()) {
        MetricsModes.MetricsMode columnMode = metricsConfig.columnMode(column.name());
        if (!columnMode.equals(defaultMode)) {
            nonDefaultModes.put(column.name(), columnMode);
        }
    }
    return nonDefaultModes;
}
@Override
// Returns optimizer statistics for a scan. Falls back to unknown column stats
// when the table has no snapshot; otherwise uses either cardinality-only stats
// (from the planned scan tasks) or full column statistics, depending on the
// enable_iceberg_column_statistics session variable.
public Statistics getTableStatistics(OptimizerContext session,
Table table,
Map<ColumnRefOperator, Column> columns,
List<PartitionKey> partitionKeys,
ScalarOperator predicate,
long limit) {
IcebergTable icebergTable = (IcebergTable) table;
Optional<Snapshot> snapshot = icebergTable.getSnapshot();
long snapshotId;
if (snapshot.isPresent()) {
snapshotId = snapshot.get().snapshotId();
} else {
// Empty table: report one row with unknown column statistics.
Statistics.Builder statisticsBuilder = Statistics.builder();
statisticsBuilder.setOutputRowCount(1);
statisticsBuilder.addColumnStatistics(statisticProvider.buildUnknownColumnStatistics(columns.keySet()));
return statisticsBuilder.build();
}
IcebergFilter key = IcebergFilter.of(
icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), snapshotId, predicate);
// Ensure file planning (and per-file stats collection) has happened for this filter.
triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit);
if (!session.getSessionVariable().enableIcebergColumnStatistics()) {
List<FileScanTask> icebergScanTasks = splitTasks.get(key);
if (icebergScanTasks == null) {
throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]",
icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), predicate);
}
try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.calculateCardinality" + key)) {
return statisticProvider.getCardinalityStats(columns, icebergScanTasks);
}
} else {
return statisticProvider.getTableStatistics(icebergTable, columns, session, predicate);
}
}
// Builds a lightweight split task: stats are stripped from data/delete files,
// and the serialized schema/spec JSON is cached per (db, table, schemaId, specId).
private IcebergSplitScanTask buildIcebergSplitScanTask(
        FileScanTask fileScanTask, Expression icebergPredicate, IcebergFilter filter) {
    long offset = fileScanTask.start();
    long length = fileScanTask.length();
    DataFile dataFileWithoutStats = fileScanTask.file().copyWithoutStats();
    DeleteFile[] deleteFiles = fileScanTask.deletes().stream()
            .map(DeleteFile::copyWithoutStats)
            .toArray(DeleteFile[]::new);

    PartitionSpec taskSpec = fileScanTask.spec();
    Schema taskSchema = fileScanTask.spec().schema();
    FileScanTaskSchema schemaKey = new FileScanTaskSchema(filter.getDatabaseName(), filter.getTableName(),
            taskSchema.schemaId(), taskSpec.specId());
    // Fix: computeIfAbsent instead of get()+put() — the serialization runs at
    // most once per key even under concurrent planning, instead of racing.
    Pair<String, String> schema = fileScanTaskSchemas.computeIfAbsent(schemaKey, ignored ->
            Pair.create(SchemaParser.toJson(taskSchema), PartitionSpecParser.toJson(taskSpec)));
    String schemaString = schema.first;
    String partitionString = schema.second;

    ResidualEvaluator residualEvaluator = ResidualEvaluator.of(taskSpec, icebergPredicate, true);
    BaseFileScanTask baseFileScanTask = new BaseFileScanTask(
            dataFileWithoutStats,
            deleteFiles,
            schemaString,
            partitionString,
            residualEvaluator);
    return new IcebergSplitScanTask(offset, length, baseFileScanTask);
}
@Override
public void refreshTable(String srDbName, Table table, List<String> partitionNames, boolean onlyCachedPartitions) {
    // Resource-mapping catalogs refresh through the legacy external-table path.
    if (isResourceMappingCatalog(catalogName)) {
        refreshTableWithResource(table);
        return;
    }
    IcebergTable icebergTable = (IcebergTable) table;
    String dbName = icebergTable.getRemoteDbName();
    String tableName = icebergTable.getRemoteTableName();
    // Drop the locally converted table so it gets rebuilt from fresh metadata.
    tables.remove(TableIdentifier.of(dbName, tableName));
    try {
        icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor);
    } catch (Exception e) {
        // On failure, invalidate so stale cached metadata cannot be served.
        LOG.error("Failed to refresh table {}.{}.{}. invalidate cache", catalogName, dbName, tableName, e);
        icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName));
    }
}
// Refreshes a resource-mapping external table by forcing the underlying
// BaseTable's operations to re-read metadata, then resetting the snapshot.
private void refreshTableWithResource(Table table) {
IcebergTable icebergTable = (IcebergTable) table;
org.apache.iceberg.Table nativeTable = icebergTable.getNativeTable();
try {
if (nativeTable instanceof BaseTable) {
BaseTable baseTable = (BaseTable) nativeTable;
// refresh() returning null means the table's metadata no longer exists.
if (baseTable.operations().refresh() == null) {
throw new NoSuchTableException("No such table: %s", nativeTable.name());
}
} else {
throw new StarRocksConnectorException("Invalid table type of %s, it should be a BaseTable!", nativeTable.name());
}
} catch (NoSuchTableException e) {
// NOTE(review): the original cause is dropped here (and below) —
// StarRocksConnectorException appears to take only a format + args.
throw new StarRocksConnectorException("No such table %s", nativeTable.name());
} catch (IllegalStateException ei) {
throw new StarRocksConnectorException("Refresh table %s with failure, the table under hood" +
" may have been dropped. You should re-create the external table. cause %s",
nativeTable.name(), ei.getMessage());
}
// Clear the cached snapshot so the next read picks up the refreshed one.
icebergTable.resetSnapshot();
}
@Override
// Commits files written by a sink into the Iceberg table inside a single
// transaction. On failure the uncommitted data files are deleted; in all cases
// the (stale) cached metadata for the table is invalidated afterwards.
public void finishSink(String dbName, String tableName, List<TSinkCommitInfo> commitInfos) {
boolean isOverwrite = false;
if (!commitInfos.isEmpty()) {
// The overwrite flag is carried on the first commit info for the whole batch.
TSinkCommitInfo sinkCommitInfo = commitInfos.get(0);
if (sinkCommitInfo.isSetIs_overwrite()) {
isOverwrite = sinkCommitInfo.is_overwrite;
}
}
List<TIcebergDataFile> dataFiles = commitInfos.stream()
.map(TSinkCommitInfo::getIceberg_data_file).collect(Collectors.toList());
IcebergTable table = (IcebergTable) getTable(dbName, tableName);
org.apache.iceberg.Table nativeTbl = table.getNativeTable();
Transaction transaction = nativeTbl.newTransaction();
// Append for plain INSERT, ReplacePartitions for INSERT OVERWRITE.
BatchWrite batchWrite = getBatchWrite(transaction, isOverwrite);
PartitionSpec partitionSpec = nativeTbl.spec();
for (TIcebergDataFile dataFile : dataFiles) {
Metrics metrics = IcebergApiConverter.buildDataFileMetrics(dataFile);
DataFiles.Builder builder =
DataFiles.builder(partitionSpec)
.withMetrics(metrics)
.withPath(dataFile.path)
.withFormat(dataFile.format)
.withRecordCount(dataFile.record_count)
.withFileSizeInBytes(dataFile.file_size_in_bytes)
.withSplitOffsets(dataFile.split_offsets);
if (partitionSpec.isPartitioned()) {
// Reconstruct the partition tuple from the file's partition directory.
String relativePartitionLocation = getIcebergRelativePartitionPath(
nativeTbl.location(), dataFile.partition_path);
PartitionData partitionData = partitionDataFromPath(
relativePartitionLocation, partitionSpec);
builder.withPartition(partitionData);
}
batchWrite.addFile(builder.build());
}
try {
batchWrite.commit();
transaction.commitTransaction();
asyncRefreshOthersFeMetadataCache(dbName, tableName);
} catch (Exception e) {
// Commit failed: clean up the orphaned data files written by the sink.
List<String> toDeleteFiles = dataFiles.stream()
.map(TIcebergDataFile::getPath)
.collect(Collectors.toList());
icebergCatalog.deleteUncommittedDataFiles(toDeleteFiles);
LOG.error("Failed to commit iceberg transaction on {}.{}", dbName, tableName, e);
throw new StarRocksConnectorException(e.getMessage());
} finally {
icebergCatalog.invalidateCacheWithoutTable(new CachingIcebergCatalog.IcebergTableName(dbName, tableName));
}
}
// Asks every other FE to refresh its cached metadata for this table,
// asynchronously on a dedicated executor.
private void asyncRefreshOthersFeMetadataCache(String dbName, String tableName) {
refreshOtherFeExecutor.execute(() -> {
LOG.info("Start to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName);
try {
GlobalStateMgr.getCurrentState().refreshOthersFeTable(
new TableName(catalogName, dbName, tableName), new ArrayList<>(), false);
} catch (DdlException e) {
LOG.error("Failed to refresh others fe iceberg metadata cache {}.{}.{}", catalogName, dbName, tableName, e);
// NOTE(review): this rethrow happens inside an executor task, so no
// caller observes it — it only terminates the task. The LOG above is
// the effective error report.
throw new StarRocksConnectorException(e.getMessage());
}
LOG.info("Finish to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName);
});
}
// Chooses the commit strategy: overwrite sinks replace whole partitions,
// plain inserts append files.
public BatchWrite getBatchWrite(Transaction transaction, boolean isOverwrite) {
    if (isOverwrite) {
        return new DynamicOverwrite(transaction);
    }
    return new Append(transaction);
}
// Parses a Hive-style relative partition path ("k1=v1/k2=v2/...") into a
// PartitionData tuple matching the given spec. Each segment must be
// "name=value" with the name equal to the corresponding partition field.
public static PartitionData partitionDataFromPath(String relativePartitionPath, PartitionSpec spec) {
    PartitionData data = new PartitionData(spec.fields().size());
    String[] partitions = relativePartitionPath.split("/", -1);
    List<PartitionField> partitionFields = spec.fields();
    // Fix: fail with a clear message instead of an IndexOutOfBoundsException
    // when the path carries more segments than the spec has fields.
    Preconditions.checkArgument(partitions.length <= partitionFields.size(),
            "Invalid partition path: %s, more segments than partition fields", relativePartitionPath);
    for (int i = 0; i < partitions.length; i++) {
        PartitionField field = partitionFields.get(i);
        String[] parts = partitions[i].split("=", 2);
        Preconditions.checkArgument(parts.length == 2 && parts[0] != null &&
                field.name().equals(parts[0]), "Invalid partition: %s", partitions[i]);
        org.apache.iceberg.types.Type sourceType = spec.partitionType().fields().get(i).type();
        data.set(i, Conversions.fromPartitionString(sourceType, parts[1]));
    }
    return data;
}
// Strips "<tableLocation>/data/" off the partition location and trims any
// surrounding slashes, yielding the relative "k1=v1/k2=v2" partition path.
public static String getIcebergRelativePartitionPath(String tableLocation, String partitionLocation) {
    String normalizedRoot = tableLocation.endsWith("/")
            ? tableLocation.substring(0, tableLocation.length() - 1)
            : tableLocation;
    String relative = PartitionUtil.getSuffixName(normalizedRoot + "/data/", partitionLocation);
    if (relative.startsWith("/")) {
        relative = relative.substring(1);
    }
    if (relative.endsWith("/")) {
        relative = relative.substring(0, relative.length() - 1);
    }
    return relative;
}
// True when every column referenced by the predicate is a partition column
// (a null predicate trivially qualifies).
public static boolean onlyHasPartitionPredicate(Table table, ScalarOperator predicate) {
    if (predicate == null) {
        return true;
    }
    List<String> partitionColNames = table.getPartitionColumnNames();
    return predicate.getColumnRefs().stream()
            .map(ColumnRefOperator::getName)
            .allMatch(partitionColNames::contains);
}
// Reads the manifest-pruning session flag; false when no session is attached.
private boolean enablePruneManifest() {
    ConnectContext context = ConnectContext.get();
    return context != null
            && context.getSessionVariable() != null
            && context.getSessionVariable().isEnablePruneIcebergManifest();
}
// Reads the column-statistics session flag; false when no session is attached.
private boolean enableCollectColumnStatistics() {
    ConnectContext context = ConnectContext.get();
    return context != null
            && context.getSessionVariable() != null
            && context.getSessionVariable().enableIcebergColumnStatistics();
}
@Override
// Drops all per-query caches and metric state; called when this metadata
// instance is released.
public void clear() {
splitTasks.clear();
databases.clear();
tables.clear();
scannedTables.clear();
metricsReporter.clear();
}
// Abstraction over the two commit strategies used by finishSink():
// append (INSERT) vs. replace-partitions (INSERT OVERWRITE).
interface BatchWrite {
void addFile(DataFile file);
void commit();
}
// Append strategy: adds data files to the table without touching existing ones.
static class Append implements BatchWrite {
private final AppendFiles append;
public Append(Transaction txn) {
append = txn.newAppend();
}
@Override
public void addFile(DataFile file) {
append.appendFile(file);
}
@Override
public void commit() {
append.commit();
}
}
// Overwrite strategy: replaces whole partitions touched by the new files
// (Iceberg dynamic partition overwrite).
static class DynamicOverwrite implements BatchWrite {
private final ReplacePartitions replace;
public DynamicOverwrite(Transaction txn) {
replace = txn.newReplacePartitions();
}
@Override
public void addFile(DataFile file) {
replace.addFile(file);
}
@Override
public void commit() {
replace.commit();
}
}
// Simple positional StructLike holding one partition tuple, used when
// reconstructing partition values from sink-written file paths.
public static class PartitionData implements StructLike {
private final Object[] values;
private PartitionData(int size) {
this.values = new Object[size];
}
@Override
public int size() {
return values.length;
}
@Override
public <T> T get(int pos, Class<T> javaClass) {
return javaClass.cast(values[pos]);
}
@Override
public <T> void set(int pos, T value) {
if (value instanceof ByteBuffer) {
// Copy the buffer's remaining bytes without disturbing its position
// (duplicate() keeps the source buffer's cursor untouched).
ByteBuffer buffer = (ByteBuffer) value;
byte[] bytes = new byte[buffer.remaining()];
buffer.duplicate().get(bytes);
values[pos] = bytes;
} else {
values[pos] = value;
}
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
PartitionData that = (PartitionData) other;
return Arrays.equals(values, that.values);
}
@Override
public int hashCode() {
return Arrays.hashCode(values);
}
}
@Override
// Exposes the storage credentials/configuration backing this catalog.
public CloudConfiguration getCloudConfiguration() {
return hdfsEnvironment.getCloudConfiguration();
}
// Cache key identifying one (schema, partition-spec) pair of a table; used to
// memoize the serialized schema/spec JSON strings for split tasks.
private static class FileScanTaskSchema {
    private final String dbName;
    private final String tableName;
    private final int schemaId;
    private final int specId;

    public FileScanTaskSchema(String dbName, String tableName, int schemaId, int specId) {
        this.dbName = dbName;
        this.tableName = tableName;
        this.schemaId = schemaId;
        this.specId = specId;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        FileScanTaskSchema other = (FileScanTaskSchema) o;
        // Cheap int comparisons first, then the string fields.
        return this.schemaId == other.schemaId
                && this.specId == other.specId
                && Objects.equals(this.dbName, other.dbName)
                && Objects.equals(this.tableName, other.tableName);
    }

    @Override
    public int hashCode() {
        return Objects.hash(dbName, tableName, schemaId, specId);
    }
}
} | class IcebergMetadata implements ConnectorMetadata {
private static final Logger LOG = LogManager.getLogger(IcebergMetadata.class);
// Table-property keys recognized by CREATE TABLE / table metadata.
public static final String LOCATION_PROPERTY = "location";
public static final String FILE_FORMAT = "file_format";
public static final String COMPRESSION_CODEC = "compression_codec";
public static final String COMMENT = "comment";
// Name of the StarRocks catalog this metadata instance serves.
private final String catalogName;
// Supplies HDFS/cloud credentials and configuration for file access.
private final HdfsEnvironment hdfsEnvironment;
// Underlying Iceberg catalog implementation (Hive/Glue/REST/...).
private final IcebergCatalog icebergCatalog;
private final IcebergStatisticProvider statisticProvider = new IcebergStatisticProvider();
// Cache of converted StarRocks Table objects keyed by (db, table).
private final Map<TableIdentifier, Table> tables = new ConcurrentHashMap<>();
// Cache of Database objects keyed by database name.
private final Map<String, Database> databases = new ConcurrentHashMap<>();
// Planned file-scan tasks per (db, table, snapshot, predicate) filter.
private final Map<IcebergFilter, List<FileScanTask>> splitTasks = new ConcurrentHashMap<>();
// Filters whose planning has completed. NOTE(review): plain HashSet while the
// sibling preparedTables is a concurrent set — confirm all access is serialized.
private final Set<IcebergFilter> scannedTables = new HashSet<>();
// Filters already claimed by prepareMetadata() (concurrent, add() acts as a latch).
private final Set<IcebergFilter> preparedTables = ConcurrentHashMap.newKeySet();
// Cache of serialized (schema JSON, partition-spec JSON) pairs per schema/spec id.
private final Map<FileScanTaskSchema, Pair<String, String>> fileScanTaskSchemas = new ConcurrentHashMap<>();
// Executor used for snapshot refresh and scan planning work.
private final ExecutorService jobPlanningExecutor;
// Executor used to asynchronously tell other FEs to refresh their caches.
private final ExecutorService refreshOtherFeExecutor;
private final IcebergMetricsReporter metricsReporter;
public IcebergMetadata(String catalogName, HdfsEnvironment hdfsEnvironment, IcebergCatalog icebergCatalog,
ExecutorService jobPlanningExecutor, ExecutorService refreshOtherFeExecutor) {
this.catalogName = catalogName;
this.hdfsEnvironment = hdfsEnvironment;
this.icebergCatalog = icebergCatalog;
this.metricsReporter = new IcebergMetricsReporter();
this.jobPlanningExecutor = jobPlanningExecutor;
this.refreshOtherFeExecutor = refreshOtherFeExecutor;
}
@Override
public List<String> listDbNames() {
return icebergCatalog.listAllDatabases();
}
@Override
public void createDb(String dbName, Map<String, String> properties) throws AlreadyExistsException {
if (dbExists(dbName)) {
throw new AlreadyExistsException("Database Already Exists");
}
icebergCatalog.createDb(dbName, properties);
}
@Override
public void dropDb(String dbName, boolean isForceDrop) throws MetaNotFoundException {
    // Refuse to drop a non-empty database; the Iceberg catalog does not cascade.
    // (isEmpty() instead of size() != 0 — same behavior, clearer intent.)
    if (!listTableNames(dbName).isEmpty()) {
        throw new StarRocksConnectorException("Database %s not empty", dbName);
    }
    icebergCatalog.dropDb(dbName);
    // Evict the cached entry so later getDb() calls go back to the catalog.
    databases.remove(dbName);
}
@Override
public Database getDb(String dbName) {
    // Single get() instead of containsKey()+get(): avoids a second hash probe and
    // a check-then-act window on the concurrent map (ConcurrentHashMap never
    // stores null values, so a null result always means "absent").
    Database cached = databases.get(dbName);
    if (cached != null) {
        return cached;
    }
    Database db;
    try {
        db = icebergCatalog.getDB(dbName);
    } catch (NoSuchNamespaceException e) {
        // Absent database is reported as null to the caller, matching ConnectorMetadata.
        LOG.error("Database {} not found", dbName, e);
        return null;
    }
    databases.put(dbName, db);
    return db;
}
@Override
public List<String> listTableNames(String dbName) {
return icebergCatalog.listTables(dbName);
}
@Override
public boolean createTable(CreateTableStmt stmt) throws DdlException {
String dbName = stmt.getDbName();
String tableName = stmt.getTableName();
Schema schema = toIcebergApiSchema(stmt.getColumns());
PartitionDesc partitionDesc = stmt.getPartitionDesc();
List<String> partitionColNames = partitionDesc == null ? Lists.newArrayList() :
((ListPartitionDesc) partitionDesc).getPartitionColNames();
PartitionSpec partitionSpec = parsePartitionFields(schema, partitionColNames);
Map<String, String> properties = stmt.getProperties() == null ? new HashMap<>() : stmt.getProperties();
String tableLocation = properties.get(LOCATION_PROPERTY);
properties.put(COMMENT, stmt.getComment());
Map<String, String> createTableProperties = IcebergApiConverter.rebuildCreateTableProperties(properties);
return icebergCatalog.createTable(dbName, tableName, schema, partitionSpec, tableLocation, createTableProperties);
}
@Override
public void alterTable(AlterTableStmt stmt) throws UserException {
    String dbName = stmt.getDbName();
    String tableName = stmt.getTableName();
    org.apache.iceberg.Table table = icebergCatalog.getTable(dbName, tableName);
    if (table == null) {
        throw new StarRocksConnectorException(
                "Failed to load iceberg table: " + stmt.getTbl().toString());
    }
    IcebergAlterTableExecutor executor = new IcebergAlterTableExecutor(stmt, table, icebergCatalog);
    executor.execute();
    // Serialize cache maintenance so the converted-table cache and the catalog
    // cache are refreshed together after the ALTER commits.
    synchronized (this) {
        tables.remove(TableIdentifier.of(dbName, tableName));
        try {
            icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor);
        } catch (Exception exception) {
            // Fix: attach the exception so refresh failures are diagnosable
            // (previously only the bare message was logged), consistent with
            // refreshTable()'s error handling.
            LOG.error("Failed to refresh caching iceberg table.", exception);
            icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName));
        }
        asyncRefreshOthersFeMetadataCache(dbName, tableName);
    }
}
@Override
public void dropTable(DropTableStmt stmt) {
Table icebergTable = getTable(stmt.getDbName(), stmt.getTableName());
if (icebergTable == null) {
return;
}
icebergCatalog.dropTable(stmt.getDbName(), stmt.getTableName(), stmt.isForceDrop());
tables.remove(TableIdentifier.of(stmt.getDbName(), stmt.getTableName()));
StatisticUtils.dropStatisticsAfterDropTable(icebergTable);
asyncRefreshOthersFeMetadataCache(stmt.getDbName(), stmt.getTableName());
}
@Override
public Table getTable(String dbName, String tblName) {
    TableIdentifier identifier = TableIdentifier.of(dbName, tblName);
    // Single get() instead of containsKey()+get(): one hash probe, and no
    // check-then-act window on the concurrent map.
    Table cached = tables.get(identifier);
    if (cached != null) {
        return cached;
    }
    try {
        IcebergCatalogType catalogType = icebergCatalog.getIcebergCatalogType();
        org.apache.iceberg.Table icebergTable = icebergCatalog.getTable(dbName, tblName);
        Table table = IcebergApiConverter.toIcebergTable(icebergTable, catalogName, dbName, tblName, catalogType.name());
        table.setComment(icebergTable.properties().getOrDefault(COMMENT, ""));
        tables.put(identifier, table);
        return table;
    } catch (StarRocksConnectorException | NoSuchTableException e) {
        // Absent/broken table is reported as null to the caller.
        LOG.error("Failed to get iceberg table {}", identifier, e);
        return null;
    }
}
@Override
public boolean tableExists(String dbName, String tblName) {
return icebergCatalog.tableExists(dbName, tblName);
}
@Override
public List<String> listPartitionNames(String dbName, String tblName) {
    // Partition listing is only implemented for hive-, rest- and glue-backed catalogs.
    IcebergCatalogType catalogType = icebergCatalog.getIcebergCatalogType();
    boolean supported = catalogType == HIVE_CATALOG
            || catalogType == REST_CATALOG
            || catalogType == GLUE_CATALOG;
    if (!supported) {
        throw new StarRocksConnectorException(
                "Do not support get partitions from catalog type: " + catalogType);
    }
    return icebergCatalog.listPartitionNames(dbName, tblName, jobPlanningExecutor);
}
@Override
public List<RemoteFileInfo> getRemoteFileInfos(Table table, List<PartitionKey> partitionKeys,
                                               long snapshotId, ScalarOperator predicate,
                                               List<String> fieldNames, long limit) {
    // partitionKeys and fieldNames are intentionally unused: iceberg file planning works
    // off the snapshot id and predicate only (see the private overload).
    return getRemoteFileInfos((IcebergTable) table, snapshotId, predicate, limit);
}
// Returns the cached iceberg scan tasks for (table, snapshot, predicate), planning them
// first if this filter key has not been processed yet.
private List<RemoteFileInfo> getRemoteFileInfos(IcebergTable table, long snapshotId,
                                                ScalarOperator predicate, long limit) {
    String dbName = table.getRemoteDbName();
    String tableName = table.getRemoteTableName();
    IcebergFilter filter = IcebergFilter.of(dbName, tableName, snapshotId, predicate);
    triggerIcebergPlanFilesIfNeeded(filter, table, predicate, limit);
    List<FileScanTask> scanTasks = splitTasks.get(filter);
    if (scanTasks == null) {
        throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]",
                dbName, tableName, predicate);
    }
    RemoteFileInfo result = new RemoteFileInfo();
    result.setFiles(Lists.newArrayList(RemoteFileDesc.createIcebergRemoteFileDesc(scanTasks)));
    return Lists.newArrayList(result);
}
@Override
public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) {
    // Reads partition-level metadata (last-updated timestamps) from iceberg's PARTITIONS
    // metadata table. Column positions below follow that table's fixed schema.
    Map<String, Partition> partitionMap = Maps.newHashMap();
    IcebergTable icebergTable = (IcebergTable) table;
    PartitionsTable partitionsTable = (PartitionsTable) MetadataTableUtils.
            createMetadataTableInstance(icebergTable.getNativeTable(), MetadataTableType.PARTITIONS);
    if (icebergTable.isUnPartitioned()) {
        try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) {
            for (FileScanTask task : tasks) {
                CloseableIterable<StructLike> rows = task.asDataTask().rows();
                for (StructLike row : rows) {
                    // Unpartitioned layout: last_updated_at sits at position 7; the first row
                    // describes the whole table, so return immediately.
                    long lastUpdated = row.get(7, Long.class);
                    Partition partition = new Partition(lastUpdated);
                    return ImmutableList.of(partition);
                }
            }
            // No rows: the table has no data yet; report an unknown (-1) timestamp.
            return ImmutableList.of(new Partition(-1));
        } catch (IOException e) {
            throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e);
        }
    } else {
        try (CloseableIterable<FileScanTask> tasks = partitionsTable.newScan().planFiles()) {
            for (FileScanTask task : tasks) {
                CloseableIterable<StructLike> rows = task.asDataTask().rows();
                for (StructLike row : rows) {
                    // Partitioned layout: partition struct at 0, spec id at 1, last_updated_at at 9.
                    StructProjection partitionData = row.get(0, StructProjection.class);
                    int specId = row.get(1, Integer.class);
                    long lastUpdated = row.get(9, Long.class);
                    PartitionSpec spec = icebergTable.getNativeTable().specs().get(specId);
                    Partition partition = new Partition(lastUpdated);
                    String partitionName =
                            PartitionUtil.convertIcebergPartitionToPartitionName(spec, partitionData);
                    partitionMap.put(partitionName, partition);
                }
            }
        } catch (IOException e) {
            throw new StarRocksConnectorException("Failed to get partitions for table: " + table.getName(), e);
        }
    }
    ImmutableList.Builder<PartitionInfo> partitions = ImmutableList.builder();
    for (String partitionName : partitionNames) {
        Partition partition = partitionMap.get(partitionName);
        if (partition == null) {
            // A requested name that is absent from the metadata used to surface as an opaque
            // NullPointerException from ImmutableList.Builder.add(null); fail descriptively.
            throw new StarRocksConnectorException(
                    "Unknown partition " + partitionName + " for table: " + table.getName());
        }
        partitions.add(partition);
    }
    return partitions.build();
}
@Override
public boolean prepareMetadata(MetaPreparationItem item, Tracers tracers) {
    // Warms the split-task cache for this table ahead of query planning; always reports success.
    IcebergTable icebergTable = (IcebergTable) item.getTable();
    Optional<Snapshot> snapshot = icebergTable.getSnapshot();
    if (snapshot.isEmpty()) {
        // Nothing to plan for a table without snapshots.
        return true;
    }
    IcebergFilter key = IcebergFilter.of(icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(),
            snapshot.get().snapshotId(), item.getPredicate());
    if (!preparedTables.add(key)) {
        // Someone already prepared this (table, snapshot, predicate) combination.
        return true;
    }
    triggerIcebergPlanFilesIfNeeded(key, icebergTable, item.getPredicate(), item.getLimit(), tracers);
    return true;
}
// Convenience overload that plans without profiling tracers.
private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate, long limit) {
    triggerIcebergPlanFilesIfNeeded(key, table, predicate, limit, null);
}
// Runs iceberg plan-files (statistics collection + split caching) for the given filter key,
// skipping work when the key was already scanned. `tracers` may be null (see the overload above).
private void triggerIcebergPlanFilesIfNeeded(IcebergFilter key, IcebergTable table, ScalarOperator predicate,
                                             long limit, Tracers tracers) {
    if (!scannedTables.contains(key)) {
        // Timed so the planning cost shows up in the query profile.
        try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.processSplit." + key)) {
            collectTableStatisticsAndCacheIcebergSplit(table, predicate, limit, tracers);
        }
    }
}
// Derives the distinct partition keys of the files surviving predicate-based planning.
// Returns an empty list when the table has no snapshot.
public List<PartitionKey> getPrunedPartitions(Table table, ScalarOperator predicate, long limit) {
    IcebergTable icebergTable = (IcebergTable) table;
    String dbName = icebergTable.getRemoteDbName();
    String tableName = icebergTable.getRemoteTableName();
    Optional<Snapshot> snapshot = icebergTable.getSnapshot();
    if (snapshot.isEmpty()) {
        // Optional.isEmpty() for consistency with prepareMetadata().
        return new ArrayList<>();
    }
    IcebergFilter key = IcebergFilter.of(dbName, tableName, snapshot.get().snapshotId(), predicate);
    triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit);
    List<PartitionKey> partitionKeys = new ArrayList<>();
    List<FileScanTask> icebergSplitTasks = splitTasks.get(key);
    if (icebergSplitTasks == null) {
        throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]",
                dbName, tableName, predicate);
    }
    Set<List<String>> scannedPartitions = new HashSet<>();
    PartitionSpec spec = icebergTable.getNativeTable().spec();
    List<Column> partitionColumns = icebergTable.getPartitionColumnsIncludeTransformed();
    for (FileScanTask fileScanTask : icebergSplitTasks) {
        org.apache.iceberg.PartitionData partitionData = (org.apache.iceberg.PartitionData) fileScanTask.file().partition();
        List<String> values = PartitionUtil.getIcebergPartitionValues(spec, partitionData);
        if (values.size() != partitionColumns.size()) {
            // Files written under a different (older) partition spec are skipped.
            continue;
        }
        if (!scannedPartitions.add(values)) {
            // Set.add() returning false replaces the previous contains()+add() pair:
            // one hash lookup, identical dedup semantics.
            continue;
        }
        try {
            List<com.starrocks.catalog.Type> srTypes = new ArrayList<>();
            for (PartitionField partitionField : spec.fields()) {
                if (partitionField.transform().isVoid()) {
                    continue;
                }
                if (!partitionField.transform().isIdentity()) {
                    Type sourceType = spec.schema().findType(partitionField.sourceId());
                    Type resultType = partitionField.transform().getResultType(sourceType);
                    if (resultType == Types.DateType.get()) {
                        // Date-typed transform results are carried in integer form.
                        resultType = Types.IntegerType.get();
                    }
                    srTypes.add(fromIcebergType(resultType));
                    continue;
                }
                srTypes.add(icebergTable.getColumn(icebergTable.getPartitionSourceName(spec.schema(),
                        partitionField)).getType());
            }
            if (icebergTable.hasPartitionTransformedEvolution()) {
                // After partition-transform evolution, use the declared partition column types
                // instead of the spec-derived ones.
                srTypes = partitionColumns.stream()
                        .map(Column::getType)
                        .collect(Collectors.toList());
            }
            partitionKeys.add(createPartitionKeyWithType(values, srTypes, table.getType()));
        } catch (Exception e) {
            LOG.error("create partition key failed.", e);
            throw new StarRocksConnectorException(e.getMessage());
        }
    }
    return partitionKeys;
}
/**
 * To optimize the MetricsModes of the Iceberg tables, it's necessary to display the columns MetricsMode in the
 * ICEBERG query profile.
 * <br>
 * None:
 * <p>
 * Under this mode, value_counts, null_value_counts, nan_value_counts, lower_bounds, upper_bounds
 * are not persisted.
 * </p>
 * Counts:
 * <p>
 * Under this mode, only value_counts, null_value_counts, nan_value_counts are persisted.
 * </p>
 * Truncate:
 * <p>
 * Under this mode, value_counts, null_value_counts, nan_value_counts and truncated lower_bounds,
 * upper_bounds are persisted.
 * </p>
 * Full:
 * <p>
 * Under this mode, value_counts, null_value_counts, nan_value_counts and full lower_bounds,
 * upper_bounds are persisted.
 * </p>
 *
 * @return mapping from column name to metrics mode, containing only columns whose mode
 *         differs from the table's default write metrics mode
 */
public static Map<String, MetricsModes.MetricsMode> getIcebergMetricsConfig(IcebergTable table) {
    MetricsModes.MetricsMode defaultMode = MetricsModes.fromString(DEFAULT_WRITE_METRICS_MODE_DEFAULT);
    MetricsConfig metricsConfig = MetricsConfig.forTable(table.getNativeTable());
    // Columns on the default mode are omitted so the profile only shows the overrides.
    Map<String, MetricsModes.MetricsMode> fieldToMetricsMode = Maps.newHashMap();
    for (Types.NestedField field : table.getNativeTable().schema().columns()) {
        MetricsModes.MetricsMode columnMode = metricsConfig.columnMode(field.name());
        if (!columnMode.equals(defaultMode)) {
            fieldToMetricsMode.put(field.name(), columnMode);
        }
    }
    return fieldToMetricsMode;
}
@Override
public Statistics getTableStatistics(OptimizerContext session,
                                     Table table,
                                     Map<ColumnRefOperator, Column> columns,
                                     List<PartitionKey> partitionKeys,
                                     ScalarOperator predicate,
                                     long limit) {
    // Returns optimizer statistics for an iceberg table. A table without a snapshot has no
    // data, so a minimal 1-row estimate with unknown column statistics is returned.
    IcebergTable icebergTable = (IcebergTable) table;
    Optional<Snapshot> snapshot = icebergTable.getSnapshot();
    long snapshotId;
    if (snapshot.isPresent()) {
        snapshotId = snapshot.get().snapshotId();
    } else {
        Statistics.Builder statisticsBuilder = Statistics.builder();
        statisticsBuilder.setOutputRowCount(1);
        statisticsBuilder.addColumnStatistics(statisticProvider.buildUnknownColumnStatistics(columns.keySet()));
        return statisticsBuilder.build();
    }
    IcebergFilter key = IcebergFilter.of(
            icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), snapshotId, predicate);
    // Make sure plan-files has populated splitTasks for this key before reading it below.
    triggerIcebergPlanFilesIfNeeded(key, icebergTable, predicate, limit);
    if (!session.getSessionVariable().enableIcebergColumnStatistics()) {
        // Cheap path: derive cardinality from the cached scan tasks only.
        List<FileScanTask> icebergScanTasks = splitTasks.get(key);
        if (icebergScanTasks == null) {
            throw new StarRocksConnectorException("Missing iceberg split task for table:[{}.{}]. predicate:[{}]",
                    icebergTable.getRemoteDbName(), icebergTable.getRemoteTableName(), predicate);
        }
        try (Timer ignored = Tracers.watchScope(EXTERNAL, "ICEBERG.calculateCardinality" + key)) {
            return statisticProvider.getCardinalityStats(columns, icebergScanTasks);
        }
    } else {
        // Full path: per-column statistics from the statistic provider.
        return statisticProvider.getTableStatistics(icebergTable, columns, session, predicate);
    }
}
// Converts a planned FileScanTask into a lightweight IcebergSplitScanTask:
// column statistics are stripped from data/delete files, and the (schema JSON,
// partition-spec JSON) pair is memoized per (db, table, schemaId, specId) so it is not
// re-serialized for every task of the same table version.
private IcebergSplitScanTask buildIcebergSplitScanTask(
        FileScanTask fileScanTask, Expression icebergPredicate, IcebergFilter filter) {
    long offset = fileScanTask.start();
    long length = fileScanTask.length();
    // Stats can be large and are not needed downstream; drop them.
    DataFile dataFileWithoutStats = fileScanTask.file().copyWithoutStats();
    DeleteFile[] deleteFiles = fileScanTask.deletes().stream()
            .map(DeleteFile::copyWithoutStats)
            .toArray(DeleteFile[]::new);
    PartitionSpec taskSpec = fileScanTask.spec();
    Schema taskSchema = fileScanTask.spec().schema();
    String schemaString;
    String partitionString;
    FileScanTaskSchema schemaKey = new FileScanTaskSchema(filter.getDatabaseName(), filter.getTableName(),
            taskSchema.schemaId(), taskSpec.specId());
    Pair<String, String> schema = fileScanTaskSchemas.get(schemaKey);
    if (schema == null) {
        // Cache miss: serialize once and remember. NOTE(review): this check-then-put
        // assumes fileScanTaskSchemas tolerates concurrent access — verify its declaration.
        schemaString = SchemaParser.toJson(fileScanTask.spec().schema());
        partitionString = PartitionSpecParser.toJson(fileScanTask.spec());
        fileScanTaskSchemas.put(schemaKey, Pair.create(schemaString, partitionString));
    } else {
        schemaString = schema.first;
        partitionString = schema.second;
    }
    // Rebuild the residual evaluator for this task's spec and predicate.
    ResidualEvaluator residualEvaluator = ResidualEvaluator.of(taskSpec, icebergPredicate, true);
    BaseFileScanTask baseFileScanTask = new BaseFileScanTask(
            dataFileWithoutStats,
            deleteFiles,
            schemaString,
            partitionString,
            residualEvaluator);
    return new IcebergSplitScanTask(offset, length, baseFileScanTask);
}
@Override
public void refreshTable(String srDbName, Table table, List<String> partitionNames, boolean onlyCachedPartitions) {
    if (isResourceMappingCatalog(catalogName)) {
        // Resource-mapping catalogs refresh through the native table operations directly.
        refreshTableWithResource(table);
        return;
    }
    IcebergTable icebergTable = (IcebergTable) table;
    String dbName = icebergTable.getRemoteDbName();
    String tableName = icebergTable.getRemoteTableName();
    // Drop the locally converted table first so the next getTable() rebuilds it.
    tables.remove(TableIdentifier.of(dbName, tableName));
    try {
        icebergCatalog.refreshTable(dbName, tableName, jobPlanningExecutor);
    } catch (Exception e) {
        // A failed refresh leaves the catalog cache stale; invalidate it instead.
        LOG.error("Failed to refresh table {}.{}.{}. invalidate cache", catalogName, dbName, tableName, e);
        icebergCatalog.invalidateCache(new CachingIcebergCatalog.IcebergTableName(dbName, tableName));
    }
}
// Refresh path for resource-mapping catalogs: force-refreshes the native BaseTable's
// metadata and translates iceberg failures into StarRocksConnectorException.
private void refreshTableWithResource(Table table) {
    IcebergTable icebergTable = (IcebergTable) table;
    org.apache.iceberg.Table nativeTable = icebergTable.getNativeTable();
    try {
        if (nativeTable instanceof BaseTable) {
            BaseTable baseTable = (BaseTable) nativeTable;
            if (baseTable.operations().refresh() == null) {
                // refresh() returning null means the table's metadata no longer exists.
                throw new NoSuchTableException("No such table: %s", nativeTable.name());
            }
        } else {
            throw new StarRocksConnectorException("Invalid table type of %s, it should be a BaseTable!", nativeTable.name());
        }
    } catch (NoSuchTableException e) {
        throw new StarRocksConnectorException("No such table %s", nativeTable.name());
    } catch (IllegalStateException ei) {
        throw new StarRocksConnectorException("Refresh table %s with failure, the table under hood" +
                " may have been dropped. You should re-create the external table. cause %s",
                nativeTable.name(), ei.getMessage());
    }
    // Drop the cached snapshot so the next access re-reads it from the refreshed metadata.
    icebergTable.resetSnapshot();
}
@Override
public void finishSink(String dbName, String tableName, List<TSinkCommitInfo> commitInfos) {
    // Commits the data files written by BE sinks into the iceberg table in one transaction.
    // INSERT OVERWRITE is signalled on the first commit info.
    boolean isOverwrite = false;
    if (!commitInfos.isEmpty()) {
        TSinkCommitInfo sinkCommitInfo = commitInfos.get(0);
        if (sinkCommitInfo.isSetIs_overwrite()) {
            isOverwrite = sinkCommitInfo.is_overwrite;
        }
    }
    List<TIcebergDataFile> dataFiles = commitInfos.stream()
            .map(TSinkCommitInfo::getIceberg_data_file).collect(Collectors.toList());
    IcebergTable table = (IcebergTable) getTable(dbName, tableName);
    if (table == null) {
        // getTable() returns null when the table cannot be loaded; fail with a clear error
        // instead of the NullPointerException this used to produce below.
        throw new StarRocksConnectorException("Failed to load iceberg table %s.%s for sink commit",
                dbName, tableName);
    }
    org.apache.iceberg.Table nativeTbl = table.getNativeTable();
    Transaction transaction = nativeTbl.newTransaction();
    BatchWrite batchWrite = getBatchWrite(transaction, isOverwrite);
    PartitionSpec partitionSpec = nativeTbl.spec();
    for (TIcebergDataFile dataFile : dataFiles) {
        Metrics metrics = IcebergApiConverter.buildDataFileMetrics(dataFile);
        DataFiles.Builder builder =
                DataFiles.builder(partitionSpec)
                        .withMetrics(metrics)
                        .withPath(dataFile.path)
                        .withFormat(dataFile.format)
                        .withRecordCount(dataFile.record_count)
                        .withFileSizeInBytes(dataFile.file_size_in_bytes)
                        .withSplitOffsets(dataFile.split_offsets);
        if (partitionSpec.isPartitioned()) {
            // Recover the partition values from the file's relative partition path.
            String relativePartitionLocation = getIcebergRelativePartitionPath(
                    nativeTbl.location(), dataFile.partition_path);
            PartitionData partitionData = partitionDataFromPath(
                    relativePartitionLocation, partitionSpec);
            builder.withPartition(partitionData);
        }
        batchWrite.addFile(builder.build());
    }
    try {
        batchWrite.commit();
        transaction.commitTransaction();
        asyncRefreshOthersFeMetadataCache(dbName, tableName);
    } catch (Exception e) {
        // The commit failed: remove the now-orphaned data files the BEs already wrote.
        List<String> toDeleteFiles = dataFiles.stream()
                .map(TIcebergDataFile::getPath)
                .collect(Collectors.toList());
        icebergCatalog.deleteUncommittedDataFiles(toDeleteFiles);
        LOG.error("Failed to commit iceberg transaction on {}.{}", dbName, tableName, e);
        throw new StarRocksConnectorException(e.getMessage());
    } finally {
        icebergCatalog.invalidateCacheWithoutTable(new CachingIcebergCatalog.IcebergTableName(dbName, tableName));
    }
}
// Asynchronously asks the other frontends to refresh their cached metadata for this table.
// Runs on refreshOtherFeExecutor so DDL/commit paths are not blocked by the fan-out.
private void asyncRefreshOthersFeMetadataCache(String dbName, String tableName) {
    refreshOtherFeExecutor.execute(() -> {
        LOG.info("Start to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName);
        try {
            GlobalStateMgr.getCurrentState().refreshOthersFeTable(
                    new TableName(catalogName, dbName, tableName), new ArrayList<>(), false);
        } catch (DdlException e) {
            LOG.error("Failed to refresh others fe iceberg metadata cache {}.{}.{}", catalogName, dbName, tableName, e);
            throw new StarRocksConnectorException(e.getMessage());
        }
        LOG.info("Finish to refresh others fe iceberg metadata cache on {}.{}.{}", catalogName, dbName, tableName);
    });
}
// Chooses the commit strategy: partition replacement for INSERT OVERWRITE, plain append otherwise.
public BatchWrite getBatchWrite(Transaction transaction, boolean isOverwrite) {
    if (isOverwrite) {
        return new DynamicOverwrite(transaction);
    }
    return new Append(transaction);
}
// Parses a hive-style relative partition path ("k1=v1/k2=v2/...") into a PartitionData
// matching the given spec. Segments must appear in the spec's field order.
public static PartitionData partitionDataFromPath(String relativePartitionPath, PartitionSpec spec) {
    PartitionData data = new PartitionData(spec.fields().size());
    String[] partitions = relativePartitionPath.split("/", -1);
    List<PartitionField> partitionFields = spec.fields();
    // Guard before indexing: a path with more segments than the spec has fields used to die
    // with a raw IndexOutOfBoundsException; report it as an invalid path instead.
    Preconditions.checkArgument(partitions.length <= partitionFields.size(),
            "Invalid partition path: %s has more segments than the partition spec fields", relativePartitionPath);
    for (int i = 0; i < partitions.length; i++) {
        PartitionField field = partitionFields.get(i);
        String[] parts = partitions[i].split("=", 2);
        Preconditions.checkArgument(parts.length == 2 && parts[0] != null &&
                field.name().equals(parts[0]), "Invalid partition: %s", partitions[i]);
        org.apache.iceberg.types.Type sourceType = spec.partitionType().fields().get(i).type();
        data.set(i, Conversions.fromPartitionString(sourceType, parts[1]));
    }
    return data;
}
// Strips "<tableLocation>/data/" from a partition location and trims surrounding slashes,
// yielding the hive-style relative partition path ("k1=v1/k2=v2").
public static String getIcebergRelativePartitionPath(String tableLocation, String partitionLocation) {
    String normalizedLocation = tableLocation.endsWith("/")
            ? tableLocation.substring(0, tableLocation.length() - 1)
            : tableLocation;
    String relativePath = PartitionUtil.getSuffixName(normalizedLocation + "/data/", partitionLocation);
    if (relativePath.startsWith("/")) {
        relativePath = relativePath.substring(1);
    }
    if (relativePath.endsWith("/")) {
        relativePath = relativePath.substring(0, relativePath.length() - 1);
    }
    return relativePath;
}
// True when every column referenced by the predicate is a partition column
// (a null predicate trivially qualifies).
public static boolean onlyHasPartitionPredicate(Table table, ScalarOperator predicate) {
    if (predicate == null) {
        return true;
    }
    List<String> partitionColNames = table.getPartitionColumnNames();
    return predicate.getColumnRefs().stream()
            .allMatch(columnRef -> partitionColNames.contains(columnRef.getName()));
}
// Session-variable gate for manifest pruning; false when no connect context is attached
// (e.g. background jobs).
private boolean enablePruneManifest() {
    ConnectContext context = ConnectContext.get();
    return context != null
            && context.getSessionVariable() != null
            && context.getSessionVariable().isEnablePruneIcebergManifest();
}
// Session-variable gate for per-column iceberg statistics collection; false when no
// connect context is attached.
private boolean enableCollectColumnStatistics() {
    ConnectContext context = ConnectContext.get();
    return context != null
            && context.getSessionVariable() != null
            && context.getSessionVariable().enableIcebergColumnStatistics();
}
@Override
public void clear() {
    // Resets every per-instance cache; called when this metadata object is discarded.
    splitTasks.clear();
    databases.clear();
    tables.clear();
    scannedTables.clear();
    metricsReporter.clear();
}
// Abstraction over the two iceberg commit styles used by finishSink()
// (append vs. replace-partitions).
interface BatchWrite {
    // Stages one data file into the pending operation.
    void addFile(DataFile file);

    // Applies the staged files to the transaction.
    void commit();
}
// Append-only commit: stages files onto an AppendFiles operation of the transaction.
static class Append implements BatchWrite {
    private final AppendFiles append;

    public Append(Transaction txn) {
        append = txn.newAppend();
    }

    @Override
    public void addFile(DataFile file) {
        append.appendFile(file);
    }

    @Override
    public void commit() {
        append.commit();
    }
}
// INSERT OVERWRITE commit: stages files onto a ReplacePartitions operation, replacing the
// partitions covered by the added files (iceberg ReplacePartitions semantics).
static class DynamicOverwrite implements BatchWrite {
    private final ReplacePartitions replace;

    public DynamicOverwrite(Transaction txn) {
        replace = txn.newReplacePartitions();
    }

    @Override
    public void addFile(DataFile file) {
        replace.addFile(file);
    }

    @Override
    public void commit() {
        replace.commit();
    }
}
// Minimal StructLike used to attach partition values to DataFiles at sink-commit time
// (see partitionDataFromPath / finishSink).
public static class PartitionData implements StructLike {
    private final Object[] values;

    private PartitionData(int size) {
        this.values = new Object[size];
    }

    @Override
    public int size() {
        return values.length;
    }

    @Override
    public <T> T get(int pos, Class<T> javaClass) {
        return javaClass.cast(values[pos]);
    }

    @Override
    public <T> void set(int pos, T value) {
        if (value instanceof ByteBuffer) {
            // Copy buffer contents via duplicate() so the source buffer's position/limit
            // are left untouched.
            ByteBuffer buffer = (ByteBuffer) value;
            byte[] bytes = new byte[buffer.remaining()];
            buffer.duplicate().get(bytes);
            values[pos] = bytes;
        } else {
            values[pos] = value;
        }
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        PartitionData that = (PartitionData) other;
        // NOTE(review): byte[] elements compare by reference under Arrays.equals(Object[]),
        // so two instances holding equal binary values may still compare unequal — confirm
        // callers never rely on value equality for binary partition values.
        return Arrays.equals(values, that.values);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(values);
    }
}
@Override
public CloudConfiguration getCloudConfiguration() {
    // Cloud credentials/config come from the catalog-level HDFS environment.
    return hdfsEnvironment.getCloudConfiguration();
}
// Cache key for the memoized (schema JSON, partition-spec JSON) pairs used by
// buildIcebergSplitScanTask; identity is (db, table, schemaId, specId).
private static class FileScanTaskSchema {
    private final String dbName;
    private final String tableName;
    private final int schemaId;
    private final int specId;

    public FileScanTaskSchema(String dbName, String tableName, int schemaId, int specId) {
        this.dbName = dbName;
        this.tableName = tableName;
        this.schemaId = schemaId;
        this.specId = specId;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        FileScanTaskSchema that = (FileScanTaskSchema) o;
        // Cheap int comparisons first, then the string fields.
        return schemaId == that.schemaId && specId == that.specId &&
                Objects.equals(dbName, that.dbName) && Objects.equals(tableName, that.tableName);
    }

    @Override
    public int hashCode() {
        return Objects.hash(dbName, tableName, schemaId, specId);
    }
}
} |
Ok, thanks for the reminder! | public void run(SourceContext<OUT> ctx) throws Exception {
final RMQCollector collector = new RMQCollector(ctx);
while (running) {
QueueingConsumer.Delivery delivery = consumer.nextDelivery();
synchronized (ctx.getCheckpointLock()) {
if (!autoAck) {
final long deliveryTag = delivery.getEnvelope().getDeliveryTag();
if (usesCorrelationId) {
final String correlationId = delivery.getProperties().getCorrelationId();
Preconditions.checkNotNull(correlationId, "RabbitMQ source was instantiated " +
"with usesCorrelationId set to true but a message was received with " +
"correlation id set to null!");
if (!addId(correlationId)) {
continue;
}
}
sessionIds.add(deliveryTag);
}
schema.deserialize(delivery.getBody(), collector);
if (collector.isEndOfStreamSignalled()) {
this.running = false;
return;
}
}
}
} | schema.deserialize(delivery.getBody(), collector); | public void run(SourceContext<OUT> ctx) throws Exception {
final RMQCollector collector = new RMQCollector(ctx);
while (running) {
QueueingConsumer.Delivery delivery = consumer.nextDelivery();
synchronized (ctx.getCheckpointLock()) {
if (!autoAck) {
final long deliveryTag = delivery.getEnvelope().getDeliveryTag();
if (usesCorrelationId) {
final String correlationId = delivery.getProperties().getCorrelationId();
Preconditions.checkNotNull(correlationId, "RabbitMQ source was instantiated " +
"with usesCorrelationId set to true but a message was received with " +
"correlation id set to null!");
if (!addId(correlationId)) {
continue;
}
}
sessionIds.add(deliveryTag);
}
schema.deserialize(delivery.getBody(), collector);
if (collector.isEndOfStreamSignalled()) {
this.running = false;
return;
}
}
}
} | class RMQSource<OUT> extends MultipleIdsMessageAcknowledgingSourceBase<OUT, String, Long>
implements ResultTypeQueryable<OUT> {
private static final long serialVersionUID = 1L;
private static final Logger LOG = LoggerFactory.getLogger(RMQSource.class);
private final RMQConnectionConfig rmqConnectionConfig;
protected final String queueName;
private final boolean usesCorrelationId;
protected DeserializationSchema<OUT> schema;
protected transient Connection connection;
protected transient Channel channel;
protected transient QueueingConsumer consumer;
protected transient boolean autoAck;
private transient volatile boolean running;
/**
* Creates a new RabbitMQ source with at-least-once message processing guarantee when
* checkpointing is enabled. No strong delivery guarantees when checkpointing is disabled.
*
* <p>For exactly-once, please use the constructor
* {@link RMQSource
* @param rmqConnectionConfig The RabbiMQ connection configuration {@link RMQConnectionConfig}.
* @param queueName The queue to receive messages from.
* @param deserializationSchema A {@link DeserializationSchema} for turning the bytes received
* into Java objects.
*/
public RMQSource(RMQConnectionConfig rmqConnectionConfig, String queueName,
                 DeserializationSchema<OUT> deserializationSchema) {
    // Delegates to the main constructor with correlation-id deduplication disabled.
    this(rmqConnectionConfig, queueName, false, deserializationSchema);
}
/**
* Creates a new RabbitMQ source. For exactly-once, you must set the correlation ids of messages
* at the producer. The correlation id must be unique. Otherwise the behavior of the source is
* undefined. If in doubt, set usesCorrelationId to false. When correlation ids are not
* used, this source has at-least-once processing semantics when checkpointing is enabled.
* @param rmqConnectionConfig The RabbiMQ connection configuration {@link RMQConnectionConfig}.
* @param queueName The queue to receive messages from.
* @param usesCorrelationId Whether the messages received are supplied with a <b>unique</b>
* id to deduplicate messages (in case of failed acknowledgments).
* Only used when checkpointing is enabled.
* @param deserializationSchema A {@link DeserializationSchema} for turning the bytes received
* into Java objects.
*/
public RMQSource(RMQConnectionConfig rmqConnectionConfig,
                 String queueName, boolean usesCorrelationId, DeserializationSchema<OUT> deserializationSchema) {
    // The acknowledging base class tracks correlation ids as Strings.
    super(String.class);
    this.rmqConnectionConfig = rmqConnectionConfig;
    this.queueName = queueName;
    this.usesCorrelationId = usesCorrelationId;
    this.schema = deserializationSchema;
}
/**
* Initializes the connection to RMQ with a default connection factory. The user may override
* this method to setup and configure their own ConnectionFactory.
*/
protected ConnectionFactory setupConnectionFactory() throws Exception {
    // Default factory straight from the connection config; subclasses may override.
    return rmqConnectionConfig.getConnectionFactory();
}
/**
* Sets up the queue. The default implementation just declares the queue. The user may override
* this method to have a custom setup for the queue (i.e. binding the queue to an exchange or
* defining custom queue parameters)
*/
protected void setupQueue() throws IOException {
    // Declares the queue with default settings; override for custom bindings/arguments.
    Util.declareQueueDefaults(channel, queueName);
}
@Override
public void open(Configuration config) throws Exception {
    super.open(config);
    ConnectionFactory factory = setupConnectionFactory();
    try {
        connection = factory.newConnection();
        channel = connection.createChannel();
        if (channel == null) {
            throw new RuntimeException("None of RabbitMQ channels are available");
        }
        setupQueue();
        consumer = new QueueingConsumer(channel);
        RuntimeContext runtimeContext = getRuntimeContext();
        if (runtimeContext instanceof StreamingRuntimeContext
                && ((StreamingRuntimeContext) runtimeContext).isCheckpointingEnabled()) {
            // With checkpointing we ack manually inside a channel transaction
            // (committed in acknowledgeSessionIDs).
            autoAck = false;
            channel.txSelect();
        } else {
            autoAck = true;
        }
        // Parameterized logging instead of eager string concatenation.
        LOG.debug("Starting RabbitMQ source with autoAck status: {}", autoAck);
        channel.basicConsume(queueName, autoAck, consumer);
    } catch (IOException e) {
        // Fix: close the half-opened connection before failing, otherwise a failure after
        // newConnection() (e.g. in setupQueue) could leak the TCP connection if close()
        // is not invoked for this task.
        closeConnectionQuietly();
        throw new RuntimeException("Cannot create RMQ connection with " + queueName + " at "
                + rmqConnectionConfig.getHost(), e);
    }
    this.schema.open(() -> getRuntimeContext().getMetricGroup().addGroup("user"));
    running = true;
}

/** Best-effort close of {@link #connection}; secondary failures are only logged. */
private void closeConnectionQuietly() {
    if (connection != null) {
        try {
            connection.close();
        } catch (Exception suppressed) {
            LOG.debug("Suppressed error while closing RabbitMQ connection after failed open()", suppressed);
        }
    }
}
@Override
public void close() throws Exception {
    super.close();
    // Fix: attempt every teardown step even if an earlier one fails. Previously a failing
    // basicCancel()/channel.close() threw immediately and the connection was never closed
    // (resource leak). The first failure is remembered and rethrown at the end.
    RuntimeException firstFailure = null;
    try {
        if (consumer != null && channel != null) {
            channel.basicCancel(consumer.getConsumerTag());
        }
    } catch (IOException e) {
        firstFailure = new RuntimeException("Error while cancelling RMQ consumer on " + queueName
                + " at " + rmqConnectionConfig.getHost(), e);
    }
    try {
        if (channel != null) {
            channel.close();
        }
    } catch (IOException e) {
        if (firstFailure == null) {
            firstFailure = new RuntimeException("Error while closing RMQ channel with " + queueName
                    + " at " + rmqConnectionConfig.getHost(), e);
        }
    }
    try {
        if (connection != null) {
            connection.close();
        }
    } catch (IOException e) {
        if (firstFailure == null) {
            firstFailure = new RuntimeException("Error while closing RMQ connection with " + queueName
                    + " at " + rmqConnectionConfig.getHost(), e);
        }
    }
    if (firstFailure != null) {
        throw firstFailure;
    }
}
/**
 * {@link Collector} handed to the deserialization schema: forwards deserialized records
 * to the source context and remembers when the schema signalled end-of-stream.
 *
 * <p>Fix: removed the stray {@code @Override} annotation that preceded this class
 * declaration — {@code @Override} is only legal on methods and did not compile here
 * (likely residue of a deleted method above).
 */
private class RMQCollector implements Collector<OUT> {
    private final SourceContext<OUT> ctx;
    private boolean endOfStreamSignalled = false;

    private RMQCollector(SourceContext<OUT> ctx) {
        this.ctx = ctx;
    }

    @Override
    public void collect(OUT record) {
        // Once end-of-stream has been signalled, all further records are dropped.
        if (endOfStreamSignalled || schema.isEndOfStream(record)) {
            this.endOfStreamSignalled = true;
            return;
        }
        ctx.collect(record);
    }

    public boolean isEndOfStreamSignalled() {
        return endOfStreamSignalled;
    }

    @Override
    public void close() {
    }
}
@Override
public void cancel() {
    // Volatile flag polled by run(); the loop exits after the in-flight delivery is handled.
    running = false;
}
@Override
protected void acknowledgeSessionIDs(List<Long> sessionIds) {
    // Acks every delivery tag belonging to the completed checkpoint, then commits the
    // channel transaction (the channel is in tx mode when checkpointing is on, see open()).
    try {
        for (long id : sessionIds) {
            channel.basicAck(id, false);
        }
        channel.txCommit();
    } catch (IOException e) {
        throw new RuntimeException("Messages could not be acknowledged during checkpoint creation.", e);
    }
}
@Override
public TypeInformation<OUT> getProducedType() {
    // The output type is dictated by the deserialization schema.
    return schema.getProducedType();
}
} | class RMQSource<OUT> extends MultipleIdsMessageAcknowledgingSourceBase<OUT, String, Long>
implements ResultTypeQueryable<OUT> {
private static final long serialVersionUID = 1L;
private static final Logger LOG = LoggerFactory.getLogger(RMQSource.class);
private final RMQConnectionConfig rmqConnectionConfig;
protected final String queueName;
private final boolean usesCorrelationId;
protected DeserializationSchema<OUT> schema;
protected transient Connection connection;
protected transient Channel channel;
protected transient QueueingConsumer consumer;
protected transient boolean autoAck;
private transient volatile boolean running;
/**
* Creates a new RabbitMQ source with at-least-once message processing guarantee when
* checkpointing is enabled. No strong delivery guarantees when checkpointing is disabled.
*
* <p>For exactly-once, please use the constructor
* {@link RMQSource
* @param rmqConnectionConfig The RabbiMQ connection configuration {@link RMQConnectionConfig}.
* @param queueName The queue to receive messages from.
* @param deserializationSchema A {@link DeserializationSchema} for turning the bytes received
* into Java objects.
*/
public RMQSource(RMQConnectionConfig rmqConnectionConfig, String queueName,
                 DeserializationSchema<OUT> deserializationSchema) {
    // Delegates to the main constructor with correlation-id deduplication disabled.
    this(rmqConnectionConfig, queueName, false, deserializationSchema);
}
/**
* Creates a new RabbitMQ source. For exactly-once, you must set the correlation ids of messages
* at the producer. The correlation id must be unique. Otherwise the behavior of the source is
* undefined. If in doubt, set usesCorrelationId to false. When correlation ids are not
* used, this source has at-least-once processing semantics when checkpointing is enabled.
* @param rmqConnectionConfig The RabbiMQ connection configuration {@link RMQConnectionConfig}.
* @param queueName The queue to receive messages from.
* @param usesCorrelationId Whether the messages received are supplied with a <b>unique</b>
* id to deduplicate messages (in case of failed acknowledgments).
* Only used when checkpointing is enabled.
* @param deserializationSchema A {@link DeserializationSchema} for turning the bytes received
* into Java objects.
*/
public RMQSource(RMQConnectionConfig rmqConnectionConfig,
                 String queueName, boolean usesCorrelationId, DeserializationSchema<OUT> deserializationSchema) {
    // The acknowledging base class tracks correlation ids as Strings.
    super(String.class);
    this.rmqConnectionConfig = rmqConnectionConfig;
    this.queueName = queueName;
    this.usesCorrelationId = usesCorrelationId;
    this.schema = deserializationSchema;
}
/**
* Initializes the connection to RMQ with a default connection factory. The user may override
* this method to setup and configure their own ConnectionFactory.
*/
protected ConnectionFactory setupConnectionFactory() throws Exception {
    // Default factory straight from the connection config; subclasses may override.
    return rmqConnectionConfig.getConnectionFactory();
}
/**
* Sets up the queue. The default implementation just declares the queue. The user may override
* this method to have a custom setup for the queue (i.e. binding the queue to an exchange or
* defining custom queue parameters)
*/
protected void setupQueue() throws IOException {
    // Declares the queue with default settings; override for custom bindings/arguments.
    Util.declareQueueDefaults(channel, queueName);
}
@Override
public void open(Configuration config) throws Exception {
    super.open(config);
    ConnectionFactory factory = setupConnectionFactory();
    try {
        connection = factory.newConnection();
        channel = connection.createChannel();
        if (channel == null) {
            throw new RuntimeException("None of RabbitMQ channels are available");
        }
        setupQueue();
        consumer = new QueueingConsumer(channel);
        RuntimeContext runtimeContext = getRuntimeContext();
        if (runtimeContext instanceof StreamingRuntimeContext
                && ((StreamingRuntimeContext) runtimeContext).isCheckpointingEnabled()) {
            // With checkpointing we ack manually inside a channel transaction
            // (committed in acknowledgeSessionIDs).
            autoAck = false;
            channel.txSelect();
        } else {
            autoAck = true;
        }
        // Parameterized logging instead of eager string concatenation.
        LOG.debug("Starting RabbitMQ source with autoAck status: {}", autoAck);
        channel.basicConsume(queueName, autoAck, consumer);
    } catch (IOException e) {
        // Fix: close the half-opened connection before failing, otherwise a failure after
        // newConnection() (e.g. in setupQueue) could leak the TCP connection if close()
        // is not invoked for this task.
        closeConnectionQuietly();
        throw new RuntimeException("Cannot create RMQ connection with " + queueName + " at "
                + rmqConnectionConfig.getHost(), e);
    }
    this.schema.open(() -> getRuntimeContext().getMetricGroup().addGroup("user"));
    running = true;
}

/** Best-effort close of {@link #connection}; secondary failures are only logged. */
private void closeConnectionQuietly() {
    if (connection != null) {
        try {
            connection.close();
        } catch (Exception suppressed) {
            LOG.debug("Suppressed error while closing RabbitMQ connection after failed open()", suppressed);
        }
    }
}
@Override
public void close() throws Exception {
    super.close();
    // Fix: attempt every teardown step even if an earlier one fails. Previously a failing
    // basicCancel()/channel.close() threw immediately and the connection was never closed
    // (resource leak). The first failure is remembered and rethrown at the end.
    RuntimeException firstFailure = null;
    try {
        if (consumer != null && channel != null) {
            channel.basicCancel(consumer.getConsumerTag());
        }
    } catch (IOException e) {
        firstFailure = new RuntimeException("Error while cancelling RMQ consumer on " + queueName
                + " at " + rmqConnectionConfig.getHost(), e);
    }
    try {
        if (channel != null) {
            channel.close();
        }
    } catch (IOException e) {
        if (firstFailure == null) {
            firstFailure = new RuntimeException("Error while closing RMQ channel with " + queueName
                    + " at " + rmqConnectionConfig.getHost(), e);
        }
    }
    try {
        if (connection != null) {
            connection.close();
        }
    } catch (IOException e) {
        if (firstFailure == null) {
            firstFailure = new RuntimeException("Error while closing RMQ connection with " + queueName
                    + " at " + rmqConnectionConfig.getHost(), e);
        }
    }
    if (firstFailure != null) {
        throw firstFailure;
    }
}
@Override
private class RMQCollector implements Collector<OUT> {
private final SourceContext<OUT> ctx;
private boolean endOfStreamSignalled = false;
private RMQCollector(SourceContext<OUT> ctx) {
this.ctx = ctx;
}
@Override
public void collect(OUT record) {
if (endOfStreamSignalled || schema.isEndOfStream(record)) {
this.endOfStreamSignalled = true;
return;
}
ctx.collect(record);
}
public boolean isEndOfStreamSignalled() {
return endOfStreamSignalled;
}
@Override
public void close() {
}
}
@Override
public void cancel() {
running = false;
}
@Override
protected void acknowledgeSessionIDs(List<Long> sessionIds) {
try {
for (long id : sessionIds) {
channel.basicAck(id, false);
}
channel.txCommit();
} catch (IOException e) {
throw new RuntimeException("Messages could not be acknowledged during checkpoint creation.", e);
}
}
@Override
public TypeInformation<OUT> getProducedType() {
return schema.getProducedType();
}
} |
Can we use the tenant_iam_role flag for this as well? | private void addParameterStoreValidationHandler(ApplicationContainerCluster cluster, DeployState deployState) {
if (deployState.zone().system() == SystemName.PublicCd) {
BindingPattern bindingPattern = SystemBindingPattern.fromHttpPath("/validate-secret-store");
Handler<AbstractConfigProducer<?>> handler = new Handler<>(
new ComponentModel("com.yahoo.jdisc.cloud.aws.AwsParameterStoreValidationHandler", null, null, null));
handler.addServerBindings(bindingPattern);
cluster.addComponent(handler);
}
} | if (deployState.zone().system() == SystemName.PublicCd) { | private void addParameterStoreValidationHandler(ApplicationContainerCluster cluster, DeployState deployState) {
if (deployState.featureFlags().tenantIamRole()) {
BindingPattern bindingPattern = SystemBindingPattern.fromHttpPath("/validate-secret-store");
Handler<AbstractConfigProducer<?>> handler = new Handler<>(
new ComponentModel("com.yahoo.jdisc.cloud.aws.AwsParameterStoreValidationHandler", null, null, null));
handler.addServerBindings(bindingPattern);
cluster.addComponent(handler);
}
} | class ContainerModelBuilder extends ConfigModelBuilder<ContainerModel> {
static final String HOSTED_VESPA_STATUS_FILE = Defaults.getDefaults().underVespaHome("var/vespa/load-balancer/status.html");
private static final String HOSTED_VESPA_STATUS_FILE_SETTING = "VESPA_LB_STATUS_FILE";
private static final String CONTAINER_TAG = "container";
private static final String DEPRECATED_CONTAINER_TAG = "jdisc";
private static final String ENVIRONMENT_VARIABLES_ELEMENT = "environment-variables";
private static final int MIN_ZOOKEEPER_NODE_COUNT = 1;
private static final int MAX_ZOOKEEPER_NODE_COUNT = 7;
public enum Networking { disable, enable }
private ApplicationPackage app;
private final boolean standaloneBuilder;
private final Networking networking;
private final boolean rpcServerEnabled;
private final boolean httpServerEnabled;
protected DeployLogger log;
public static final List<ConfigModelId> configModelIds =
ImmutableList.of(ConfigModelId.fromName(CONTAINER_TAG), ConfigModelId.fromName(DEPRECATED_CONTAINER_TAG));
private static final String xmlRendererId = RendererRegistry.xmlRendererId.getName();
private static final String jsonRendererId = RendererRegistry.jsonRendererId.getName();
public ContainerModelBuilder(boolean standaloneBuilder, Networking networking) {
super(ContainerModel.class);
this.standaloneBuilder = standaloneBuilder;
this.networking = networking;
this.rpcServerEnabled = !standaloneBuilder;
this.httpServerEnabled = networking == Networking.enable;
}
@Override
public List<ConfigModelId> handlesElements() {
return configModelIds;
}
@Override
public void doBuild(ContainerModel model, Element spec, ConfigModelContext modelContext) {
log = modelContext.getDeployLogger();
app = modelContext.getApplicationPackage();
checkVersion(spec);
checkTagName(spec, log);
ApplicationContainerCluster cluster = createContainerCluster(spec, modelContext);
addClusterContent(cluster, spec, modelContext);
cluster.setMessageBusEnabled(rpcServerEnabled);
cluster.setRpcServerEnabled(rpcServerEnabled);
cluster.setHttpServerEnabled(httpServerEnabled);
model.setCluster(cluster);
}
private ApplicationContainerCluster createContainerCluster(Element spec, ConfigModelContext modelContext) {
return new VespaDomBuilder.DomConfigProducerBuilder<ApplicationContainerCluster>() {
@Override
protected ApplicationContainerCluster doBuild(DeployState deployState, AbstractConfigProducer ancestor, Element producerSpec) {
return new ApplicationContainerCluster(ancestor, modelContext.getProducerId(),
modelContext.getProducerId(), deployState);
}
}.build(modelContext.getDeployState(), modelContext.getParentProducer(), spec);
}
private void addClusterContent(ApplicationContainerCluster cluster, Element spec, ConfigModelContext context) {
DeployState deployState = context.getDeployState();
DocumentFactoryBuilder.buildDocumentFactories(cluster, spec);
addConfiguredComponents(deployState, cluster, spec);
addSecretStore(cluster, spec);
addRestApis(deployState, spec, cluster);
addServlets(deployState, spec, cluster);
addModelEvaluation(spec, cluster, context);
addProcessing(deployState, spec, cluster);
addSearch(deployState, spec, cluster);
addDocproc(deployState, spec, cluster);
addDocumentApi(spec, cluster);
cluster.addDefaultHandlersExceptStatus();
addStatusHandlers(cluster, context.getDeployState().isHosted());
addUserHandlers(deployState, cluster, spec);
addHttp(deployState, spec, cluster, context);
addAccessLogs(deployState, cluster, spec);
addRoutingAliases(cluster, spec, deployState.zone().environment());
addNodes(cluster, spec, context);
addClientProviders(deployState, spec, cluster);
addServerProviders(deployState, spec, cluster);
addAthensCopperArgos(cluster, context);
addZooKeeper(cluster, spec);
addParameterStoreValidationHandler(cluster, deployState);
}
private void addZooKeeper(ApplicationContainerCluster cluster, Element spec) {
if (!hasZooKeeper(spec)) return;
Element nodesElement = XML.getChild(spec, "nodes");
boolean isCombined = nodesElement != null && nodesElement.hasAttribute("of");
if (isCombined) {
throw new IllegalArgumentException("A combined cluster cannot run ZooKeeper");
}
long nonRetiredNodes = cluster.getContainers().stream().filter(c -> !c.isRetired()).count();
if (nonRetiredNodes < MIN_ZOOKEEPER_NODE_COUNT || nonRetiredNodes > MAX_ZOOKEEPER_NODE_COUNT || nonRetiredNodes % 2 == 0) {
throw new IllegalArgumentException("Cluster with ZooKeeper needs an odd number of nodes, between " +
MIN_ZOOKEEPER_NODE_COUNT + " and " + MAX_ZOOKEEPER_NODE_COUNT +
", have " + nonRetiredNodes + " non-retired");
}
cluster.addSimpleComponent("com.yahoo.vespa.curator.Curator", null, "zkfacade");
cluster.getContainers().forEach(ContainerModelBuilder::addReconfigurableZooKeeperServerComponents);
}
public static void addReconfigurableZooKeeperServerComponents(Container container) {
container.addComponent(zookeeperComponent("com.yahoo.vespa.zookeeper.ReconfigurableVespaZooKeeperServer", container));
container.addComponent(zookeeperComponent("com.yahoo.vespa.zookeeper.Reconfigurer", container));
container.addComponent(zookeeperComponent("com.yahoo.vespa.zookeeper.VespaZooKeeperAdminImpl", container));
}
private static SimpleComponent zookeeperComponent(String idSpec, Container container) {
String configId = container.getConfigId();
return new SimpleComponent(new ComponentModel(idSpec, null, "zookeeper-server", configId));
}
private void addSecretStore(ApplicationContainerCluster cluster, Element spec) {
Element secretStoreElement = XML.getChild(spec, "secret-store");
if (secretStoreElement != null) {
SecretStore secretStore = new SecretStore();
for (Element group : XML.getChildren(secretStoreElement, "group")) {
secretStore.addGroup(group.getAttribute("name"), group.getAttribute("environment"));
}
cluster.setSecretStore(secretStore);
}
}
private void addAthensCopperArgos(ApplicationContainerCluster cluster, ConfigModelContext context) {
if ( ! context.getDeployState().isHosted()) return;
app.getDeployment().map(DeploymentSpec::fromXml)
.ifPresent(deploymentSpec -> {
addIdentityProvider(cluster,
context.getDeployState().getProperties().configServerSpecs(),
context.getDeployState().getProperties().loadBalancerName(),
context.getDeployState().getProperties().ztsUrl(),
context.getDeployState().getProperties().athenzDnsSuffix(),
context.getDeployState().zone(),
deploymentSpec);
addRotationProperties(cluster, context.getDeployState().zone(), context.getDeployState().getEndpoints(), deploymentSpec);
});
}
private void addRotationProperties(ApplicationContainerCluster cluster, Zone zone, Set<ContainerEndpoint> endpoints, DeploymentSpec spec) {
cluster.getContainers().forEach(container -> {
setRotations(container, endpoints, cluster.getName());
container.setProp("activeRotation", Boolean.toString(zoneHasActiveRotation(zone, spec)));
});
}
private boolean zoneHasActiveRotation(Zone zone, DeploymentSpec spec) {
Optional<DeploymentInstanceSpec> instance = spec.instance(app.getApplicationId().instance());
if (instance.isEmpty()) return false;
return instance.get().zones().stream()
.anyMatch(declaredZone -> declaredZone.concerns(zone.environment(), Optional.of(zone.region())) &&
declaredZone.active());
}
private void setRotations(Container container, Set<ContainerEndpoint> endpoints, String containerClusterName) {
var rotationsProperty = endpoints.stream()
.filter(endpoint -> endpoint.clusterId().equals(containerClusterName))
.flatMap(endpoint -> endpoint.names().stream())
.collect(Collectors.toUnmodifiableSet());
container.setProp("rotations", String.join(",", rotationsProperty));
}
private void addRoutingAliases(ApplicationContainerCluster cluster, Element spec, Environment environment) {
if (environment != Environment.prod) return;
Element aliases = XML.getChild(spec, "aliases");
for (Element alias : XML.getChildren(aliases, "service-alias")) {
cluster.serviceAliases().add(XML.getValue(alias));
}
for (Element alias : XML.getChildren(aliases, "endpoint-alias")) {
cluster.endpointAliases().add(XML.getValue(alias));
}
}
private void addConfiguredComponents(DeployState deployState, ApplicationContainerCluster cluster, Element spec) {
for (Element components : XML.getChildren(spec, "components")) {
addIncludes(components);
addConfiguredComponents(deployState, cluster, components, "component");
}
addConfiguredComponents(deployState, cluster, spec, "component");
}
protected void addStatusHandlers(ApplicationContainerCluster cluster, boolean isHostedVespa) {
if (isHostedVespa) {
String name = "status.html";
Optional<String> statusFile = Optional.ofNullable(System.getenv(HOSTED_VESPA_STATUS_FILE_SETTING));
cluster.addComponent(
new FileStatusHandlerComponent(
name + "-status-handler",
statusFile.orElse(HOSTED_VESPA_STATUS_FILE),
SystemBindingPattern.fromHttpPath("/" + name)));
} else {
cluster.addVipHandler();
}
}
private void addClientProviders(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
for (Element clientSpec: XML.getChildren(spec, "client")) {
cluster.addComponent(new DomClientProviderBuilder(cluster).build(deployState, cluster, clientSpec));
}
}
private void addServerProviders(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
addConfiguredComponents(deployState, cluster, spec, "server");
}
protected void addAccessLogs(DeployState deployState, ApplicationContainerCluster cluster, Element spec) {
List<Element> accessLogElements = getAccessLogElements(spec);
for (Element accessLog : accessLogElements) {
AccessLogBuilder.buildIfNotDisabled(deployState, cluster, accessLog).ifPresent(cluster::addComponent);
}
if (accessLogElements.isEmpty() && deployState.getAccessLoggingEnabledByDefault())
cluster.addDefaultSearchAccessLog();
if (cluster.getAllComponents().stream().anyMatch(component -> component instanceof AccessLogComponent)) {
cluster.addComponent(new ConnectionLogComponent(cluster, FileConnectionLog.class, "qrs"));
} else {
cluster.addComponent(new ConnectionLogComponent(cluster, VoidConnectionLog.class, "qrs"));
}
}
private List<Element> getAccessLogElements(Element spec) {
return XML.getChildren(spec, "accesslog");
}
protected void addHttp(DeployState deployState, Element spec, ApplicationContainerCluster cluster, ConfigModelContext context) {
Element httpElement = XML.getChild(spec, "http");
if (httpElement != null) {
cluster.setHttp(buildHttp(deployState, cluster, httpElement));
}
if (isHostedTenantApplication(context)) {
addHostedImplicitHttpIfNotPresent(cluster);
addHostedImplicitAccessControlIfNotPresent(deployState, cluster);
addDefaultConnectorHostedFilterBinding(cluster);
addAdditionalHostedConnector(deployState, cluster, context);
}
}
private void addDefaultConnectorHostedFilterBinding(ApplicationContainerCluster cluster) {
cluster.getHttp().getAccessControl()
.ifPresent(accessControl -> accessControl.configureDefaultHostedConnector(cluster.getHttp())); ;
}
private void addAdditionalHostedConnector(DeployState deployState, ApplicationContainerCluster cluster, ConfigModelContext context) {
JettyHttpServer server = cluster.getHttp().getHttpServer().get();
String serverName = server.getComponentId().getName();
HostedSslConnectorFactory connectorFactory;
if (deployState.endpointCertificateSecrets().isPresent()) {
boolean authorizeClient = deployState.zone().system().isPublic();
if (authorizeClient && deployState.tlsClientAuthority().isEmpty()) {
throw new RuntimeException("Client certificate authority security/clients.pem is missing - see: https:
}
EndpointCertificateSecrets endpointCertificateSecrets = deployState.endpointCertificateSecrets().get();
boolean enforceHandshakeClientAuth = context.properties().featureFlags().useAccessControlTlsHandshakeClientAuth() &&
cluster.getHttp().getAccessControl()
.map(accessControl -> accessControl.clientAuthentication)
.map(clientAuth -> clientAuth.equals(AccessControl.ClientAuthentication.need))
.orElse(false);
connectorFactory = authorizeClient
? HostedSslConnectorFactory.withProvidedCertificateAndTruststore(serverName, endpointCertificateSecrets, deployState.tlsClientAuthority().get())
: HostedSslConnectorFactory.withProvidedCertificate(serverName, endpointCertificateSecrets, enforceHandshakeClientAuth);
} else {
connectorFactory = HostedSslConnectorFactory.withDefaultCertificateAndTruststore(serverName);
}
cluster.getHttp().getAccessControl().ifPresent(accessControl -> accessControl.configureHostedConnector(connectorFactory));
server.addConnector(connectorFactory);
}
private static boolean isHostedTenantApplication(ConfigModelContext context) {
var deployState = context.getDeployState();
boolean isTesterApplication = deployState.getProperties().applicationId().instance().isTester();
return deployState.isHosted() && context.getApplicationType() == ApplicationType.DEFAULT && !isTesterApplication;
}
private static void addHostedImplicitHttpIfNotPresent(ApplicationContainerCluster cluster) {
if(cluster.getHttp() == null) {
cluster.setHttp(new Http(new FilterChains(cluster)));
}
JettyHttpServer httpServer = cluster.getHttp().getHttpServer().orElse(null);
if (httpServer == null) {
httpServer = new JettyHttpServer(new ComponentId("DefaultHttpServer"), cluster, cluster.isHostedVespa());
cluster.getHttp().setHttpServer(httpServer);
}
int defaultPort = Defaults.getDefaults().vespaWebServicePort();
boolean defaultConnectorPresent = httpServer.getConnectorFactories().stream().anyMatch(connector -> connector.getListenPort() == defaultPort);
if (!defaultConnectorPresent) {
httpServer.addConnector(new ConnectorFactory.Builder("SearchServer", defaultPort).build());
}
}
private void addHostedImplicitAccessControlIfNotPresent(DeployState deployState, ApplicationContainerCluster cluster) {
Http http = cluster.getHttp();
if (http.getAccessControl().isPresent()) return;
AthenzDomain tenantDomain = deployState.getProperties().athenzDomain().orElse(null);
if (tenantDomain == null) return;
new AccessControl.Builder(tenantDomain.value())
.setHandlers(cluster)
.readEnabled(false)
.writeEnabled(false)
.clientAuthentication(AccessControl.ClientAuthentication.need)
.build()
.configureHttpFilterChains(http);
}
private Http buildHttp(DeployState deployState, ApplicationContainerCluster cluster, Element httpElement) {
Http http = new HttpBuilder().build(deployState, cluster, httpElement);
if (networking == Networking.disable)
http.removeAllServers();
return http;
}
private void addRestApis(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
for (Element restApiElem : XML.getChildren(spec, "rest-api")) {
cluster.addRestApi(
new RestApiBuilder().build(deployState, cluster, restApiElem));
}
}
private void addServlets(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
for (Element servletElem : XML.getChildren(spec, "servlet"))
cluster.addServlet(new ServletBuilder().build(deployState, cluster, servletElem));
}
private void addDocumentApi(Element spec, ApplicationContainerCluster cluster) {
ContainerDocumentApi containerDocumentApi = buildDocumentApi(cluster, spec);
if (containerDocumentApi == null) return;
cluster.setDocumentApi(containerDocumentApi);
}
private void addDocproc(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
ContainerDocproc containerDocproc = buildDocproc(deployState, cluster, spec);
if (containerDocproc == null) return;
cluster.setDocproc(containerDocproc);
ContainerDocproc.Options docprocOptions = containerDocproc.options;
cluster.setMbusParams(new ApplicationContainerCluster.MbusParams(
docprocOptions.maxConcurrentFactor, docprocOptions.documentExpansionFactor, docprocOptions.containerCoreMemory));
}
private void addSearch(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
Element searchElement = XML.getChild(spec, "search");
if (searchElement == null) return;
addIncludes(searchElement);
cluster.setSearch(buildSearch(deployState, cluster, searchElement));
addSearchHandler(cluster, searchElement);
addGUIHandler(cluster);
validateAndAddConfiguredComponents(deployState, cluster, searchElement, "renderer", ContainerModelBuilder::validateRendererElement);
}
private void addModelEvaluation(Element spec, ApplicationContainerCluster cluster, ConfigModelContext context) {
Element modelEvaluationElement = XML.getChild(spec, "model-evaluation");
if (modelEvaluationElement == null) return;
RankProfileList profiles =
context.vespaModel() != null ? context.vespaModel().rankProfileList() : RankProfileList.empty;
cluster.setModelEvaluation(new ContainerModelEvaluation(cluster, profiles));
}
private void addProcessing(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
Element processingElement = XML.getChild(spec, "processing");
if (processingElement == null) return;
addIncludes(processingElement);
cluster.setProcessingChains(new DomProcessingBuilder(null).build(deployState, cluster, processingElement),
serverBindings(processingElement, ProcessingChains.defaultBindings).toArray(BindingPattern[]::new));
validateAndAddConfiguredComponents(deployState, cluster, processingElement, "renderer", ContainerModelBuilder::validateRendererElement);
}
private ContainerSearch buildSearch(DeployState deployState, ApplicationContainerCluster containerCluster, Element producerSpec) {
SearchChains searchChains = new DomSearchChainsBuilder(null, false)
.build(deployState, containerCluster, producerSpec);
ContainerSearch containerSearch = new ContainerSearch(containerCluster, searchChains, new ContainerSearch.Options());
applyApplicationPackageDirectoryConfigs(deployState.getApplicationPackage(), containerSearch);
containerSearch.setQueryProfiles(deployState.getQueryProfiles());
containerSearch.setSemanticRules(deployState.getSemanticRules());
return containerSearch;
}
private void applyApplicationPackageDirectoryConfigs(ApplicationPackage applicationPackage,ContainerSearch containerSearch) {
PageTemplates.validate(applicationPackage);
containerSearch.setPageTemplates(PageTemplates.create(applicationPackage));
}
private void addUserHandlers(DeployState deployState, ApplicationContainerCluster cluster, Element spec) {
for (Element component: XML.getChildren(spec, "handler")) {
cluster.addComponent(
new DomHandlerBuilder(cluster).build(deployState, cluster, component));
}
}
private void checkVersion(Element spec) {
String version = spec.getAttribute("version");
if ( ! Version.fromString(version).equals(new Version(1))) {
throw new RuntimeException("Expected container version to be 1.0, but got " + version);
}
}
private void checkTagName(Element spec, DeployLogger logger) {
if (spec.getTagName().equals(DEPRECATED_CONTAINER_TAG)) {
logger.log(WARNING, "'" + DEPRECATED_CONTAINER_TAG + "' is deprecated as tag name. Use '" + CONTAINER_TAG + "' instead.");
}
}
private void addNodes(ApplicationContainerCluster cluster, Element spec, ConfigModelContext context) {
if (standaloneBuilder)
addStandaloneNode(cluster);
else
addNodesFromXml(cluster, spec, context);
}
private void addStandaloneNode(ApplicationContainerCluster cluster) {
ApplicationContainer container = new ApplicationContainer(cluster, "standalone", cluster.getContainers().size(), cluster.isHostedVespa());
cluster.addContainers(Collections.singleton(container));
}
static boolean incompatibleGCOptions(String jvmargs) {
Pattern gcAlgorithm = Pattern.compile("-XX:[-+]Use.+GC");
Pattern cmsArgs = Pattern.compile("-XX:[-+]*CMS");
return (gcAlgorithm.matcher(jvmargs).find() ||cmsArgs.matcher(jvmargs).find());
}
private static String buildJvmGCOptions(DeployState deployState, String jvmGCOPtions) {
String options = (jvmGCOPtions != null)
? jvmGCOPtions
: deployState.getProperties().jvmGCOptions();
return (options == null ||options.isEmpty())
? (deployState.isHosted() ? ContainerCluster.CMS : ContainerCluster.G1GC)
: options;
}
private static String getJvmOptions(ApplicationContainerCluster cluster, Element nodesElement, DeployLogger deployLogger) {
String jvmOptions;
if (nodesElement.hasAttribute(VespaDomBuilder.JVM_OPTIONS)) {
jvmOptions = nodesElement.getAttribute(VespaDomBuilder.JVM_OPTIONS);
if (nodesElement.hasAttribute(VespaDomBuilder.JVMARGS_ATTRIB_NAME)) {
String jvmArgs = nodesElement.getAttribute(VespaDomBuilder.JVMARGS_ATTRIB_NAME);
throw new IllegalArgumentException("You have specified both jvm-options='" + jvmOptions + "'" +
" and deprecated jvmargs='" + jvmArgs + "'. Merge jvmargs into jvm-options.");
}
} else {
jvmOptions = nodesElement.getAttribute(VespaDomBuilder.JVMARGS_ATTRIB_NAME);
if (incompatibleGCOptions(jvmOptions)) {
deployLogger.log(WARNING, "You need to move out your GC related options from 'jvmargs' to 'jvm-gc-options'");
cluster.setJvmGCOptions(ContainerCluster.G1GC);
}
}
return jvmOptions;
}
private static String extractAttribute(Element element, String attrName) {
return element.hasAttribute(attrName) ? element.getAttribute(attrName) : null;
}
void extractJvmFromLegacyNodesTag(List<ApplicationContainer> nodes, ApplicationContainerCluster cluster,
Element nodesElement, ConfigModelContext context) {
applyNodesTagJvmArgs(nodes, getJvmOptions(cluster, nodesElement, context.getDeployLogger()));
if (cluster.getJvmGCOptions().isEmpty()) {
String jvmGCOptions = extractAttribute(nodesElement, VespaDomBuilder.JVM_GC_OPTIONS);
cluster.setJvmGCOptions(buildJvmGCOptions(context.getDeployState(), jvmGCOptions));
}
applyMemoryPercentage(cluster, nodesElement.getAttribute(VespaDomBuilder.Allocated_MEMORY_ATTRIB_NAME));
}
void extractJvmTag(List<ApplicationContainer> nodes, ApplicationContainerCluster cluster,
Element jvmElement, ConfigModelContext context) {
applyNodesTagJvmArgs(nodes, jvmElement.getAttribute(VespaDomBuilder.OPTIONS));
applyMemoryPercentage(cluster, jvmElement.getAttribute(VespaDomBuilder.Allocated_MEMORY_ATTRIB_NAME));
String jvmGCOptions = extractAttribute(jvmElement, VespaDomBuilder.GC_OPTIONS);
cluster.setJvmGCOptions(buildJvmGCOptions(context.getDeployState(), jvmGCOptions));
}
/**
* Add nodes to cluster according to the given containerElement.
*
* Note: DO NOT change allocation behaviour to allow version X and Y of the config-model to allocate a different set
* of nodes. Such changes must be guarded by a common condition (e.g. feature flag) so the behaviour can be changed
* simultaneously for all active config models.
*/
private void addNodesFromXml(ApplicationContainerCluster cluster, Element containerElement, ConfigModelContext context) {
Element nodesElement = XML.getChild(containerElement, "nodes");
if (nodesElement == null) {
cluster.addContainers(allocateWithoutNodesTag(cluster, context));
} else {
List<ApplicationContainer> nodes = createNodes(cluster, containerElement, nodesElement, context);
Element jvmElement = XML.getChild(nodesElement, "jvm");
if (jvmElement == null) {
extractJvmFromLegacyNodesTag(nodes, cluster, nodesElement, context);
} else {
extractJvmTag(nodes, cluster, jvmElement, context);
}
applyRoutingAliasProperties(nodes, cluster);
applyDefaultPreload(nodes, nodesElement);
String environmentVars = getEnvironmentVariables(XML.getChild(nodesElement, ENVIRONMENT_VARIABLES_ELEMENT));
if (!environmentVars.isEmpty()) {
cluster.setEnvironmentVars(environmentVars);
}
if (useCpuSocketAffinity(nodesElement))
AbstractService.distributeCpuSocketAffinity(nodes);
cluster.addContainers(nodes);
}
}
private static String getEnvironmentVariables(Element environmentVariables) {
StringBuilder sb = new StringBuilder();
if (environmentVariables != null) {
for (Element var: XML.getChildren(environmentVariables)) {
sb.append(var.getNodeName()).append('=').append(var.getTextContent()).append(' ');
}
}
return sb.toString();
}
private List<ApplicationContainer> createNodes(ApplicationContainerCluster cluster, Element containerElement, Element nodesElement, ConfigModelContext context) {
if (nodesElement.hasAttribute("type"))
return createNodesFromNodeType(cluster, nodesElement, context);
else if (nodesElement.hasAttribute("of"))
return createNodesFromContentServiceReference(cluster, nodesElement, context);
else if (nodesElement.hasAttribute("count"))
return createNodesFromNodeCount(cluster, containerElement, nodesElement, context);
else if (cluster.isHostedVespa() && cluster.getZone().environment().isManuallyDeployed())
return createNodesFromNodeCount(cluster, containerElement, nodesElement, context);
else
return createNodesFromNodeList(context.getDeployState(), cluster, nodesElement);
}
private static void applyRoutingAliasProperties(List<ApplicationContainer> result, ApplicationContainerCluster cluster) {
if (!cluster.serviceAliases().isEmpty()) {
result.forEach(container -> {
container.setProp("servicealiases", cluster.serviceAliases().stream().collect(Collectors.joining(",")));
});
}
if (!cluster.endpointAliases().isEmpty()) {
result.forEach(container -> {
container.setProp("endpointaliases", cluster.endpointAliases().stream().collect(Collectors.joining(",")));
});
}
}
private static void applyMemoryPercentage(ApplicationContainerCluster cluster, String memoryPercentage) {
if (memoryPercentage == null || memoryPercentage.isEmpty()) return;
memoryPercentage = memoryPercentage.trim();
if ( ! memoryPercentage.endsWith("%"))
throw new IllegalArgumentException("The memory percentage given for nodes in " + cluster +
" must be an integer percentage ending by the '%' sign");
memoryPercentage = memoryPercentage.substring(0, memoryPercentage.length()-1).trim();
try {
cluster.setMemoryPercentage(Integer.parseInt(memoryPercentage));
}
catch (NumberFormatException e) {
throw new IllegalArgumentException("The memory percentage given for nodes in " + cluster +
" must be an integer percentage ending by the '%' sign");
}
}
/** Allocate a container cluster without a nodes tag */
private List<ApplicationContainer> allocateWithoutNodesTag(ApplicationContainerCluster cluster, ConfigModelContext context) {
DeployState deployState = context.getDeployState();
HostSystem hostSystem = cluster.hostSystem();
if (deployState.isHosted()) {
ClusterSpec clusterSpec = ClusterSpec.request(ClusterSpec.Type.container,
ClusterSpec.Id.from(cluster.getName()))
.vespaVersion(deployState.getWantedNodeVespaVersion())
.dockerImageRepository(deployState.getWantedDockerImageRepo())
.build();
int nodeCount = deployState.zone().environment().isProduction() ? 2 : 1;
Capacity capacity = Capacity.from(new ClusterResources(nodeCount, 1, NodeResources.unspecified()),
false,
!deployState.getProperties().isBootstrap());
var hosts = hostSystem.allocateHosts(clusterSpec, capacity, log);
return createNodesFromHosts(log, hosts, cluster);
}
return singleHostContainerCluster(cluster, hostSystem.getHost(Container.SINGLENODE_CONTAINER_SERVICESPEC), context);
}
private List<ApplicationContainer> singleHostContainerCluster(ApplicationContainerCluster cluster, HostResource host, ConfigModelContext context) {
ApplicationContainer node = new ApplicationContainer(cluster, "container.0", 0, cluster.isHostedVespa());
node.setHostResource(host);
node.initService(context.getDeployLogger());
return List.of(node);
}
private List<ApplicationContainer> createNodesFromNodeCount(ApplicationContainerCluster cluster, Element containerElement, Element nodesElement, ConfigModelContext context) {
NodesSpecification nodesSpecification = NodesSpecification.from(new ModelElement(nodesElement), context);
Map<HostResource, ClusterMembership> hosts = nodesSpecification.provision(cluster.getRoot().hostSystem(),
ClusterSpec.Type.container,
ClusterSpec.Id.from(cluster.getName()),
log,
hasZooKeeper(containerElement));
return createNodesFromHosts(context.getDeployLogger(), hosts, cluster);
}
private List<ApplicationContainer> createNodesFromNodeType(ApplicationContainerCluster cluster, Element nodesElement, ConfigModelContext context) {
NodeType type = NodeType.valueOf(nodesElement.getAttribute("type"));
ClusterSpec clusterSpec = ClusterSpec.request(ClusterSpec.Type.container, ClusterSpec.Id.from(cluster.getName()))
.vespaVersion(context.getDeployState().getWantedNodeVespaVersion())
.dockerImageRepository(context.getDeployState().getWantedDockerImageRepo())
.build();
Map<HostResource, ClusterMembership> hosts =
cluster.getRoot().hostSystem().allocateHosts(clusterSpec,
Capacity.fromRequiredNodeType(type), log);
return createNodesFromHosts(context.getDeployLogger(), hosts, cluster);
}
private List<ApplicationContainer> createNodesFromContentServiceReference(ApplicationContainerCluster cluster, Element nodesElement, ConfigModelContext context) {
NodesSpecification nodeSpecification;
try {
nodeSpecification = NodesSpecification.from(new ModelElement(nodesElement), context);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException(cluster + " contains an invalid reference", e);
}
String referenceId = nodesElement.getAttribute("of");
cluster.setHostClusterId(referenceId);
Map<HostResource, ClusterMembership> hosts =
StorageGroup.provisionHosts(nodeSpecification,
referenceId,
cluster.getRoot().hostSystem(),
context.getDeployLogger());
return createNodesFromHosts(context.getDeployLogger(), hosts, cluster);
}
/** Wraps each provisioned host/membership pair as an initialized ApplicationContainer. */
private List<ApplicationContainer> createNodesFromHosts(DeployLogger deployLogger, Map<HostResource, ClusterMembership> hosts, ApplicationContainerCluster cluster) {
    List<ApplicationContainer> containers = new ArrayList<>();
    hosts.forEach((host, membership) -> {
        // Container service id follows the membership index, e.g. "container.0".
        ApplicationContainer node = new ApplicationContainer(cluster, "container." + membership.index(),
                                                             membership.retired(), membership.index(),
                                                             cluster.isHostedVespa());
        node.setHostResource(host);
        node.initService(deployLogger);
        containers.add(node);
    });
    return containers;
}
/** Builds one container per explicit {@code <node>} child, indexed in document order. */
private List<ApplicationContainer> createNodesFromNodeList(DeployState deployState, ApplicationContainerCluster cluster, Element nodesElement) {
    List<ApplicationContainer> nodes = new ArrayList<>();
    List<Element> nodeElements = XML.getChildren(nodesElement, "node");
    for (int index = 0; index < nodeElements.size(); index++) {
        nodes.add(new ContainerServiceBuilder("container." + index, index)
                          .build(deployState, cluster, nodeElements.get(index)));
    }
    return nodes;
}
/** Returns the cpu-socket-affinity attribute value, defaulting to false when absent. */
private static boolean useCpuSocketAffinity(Element nodesElement) {
    return nodesElement.hasAttribute(VespaDomBuilder.CPU_SOCKET_AFFINITY_ATTRIB_NAME)
           && Boolean.parseBoolean(nodesElement.getAttribute(VespaDomBuilder.CPU_SOCKET_AFFINITY_ATTRIB_NAME));
}
/** Prepends the nodes-tag JVM args to every container that has no JVM options of its own. */
private static void applyNodesTagJvmArgs(List<ApplicationContainer> containers, String jvmArgs) {
    containers.stream()
              .filter(container -> container.getAssignedJvmOptions().isEmpty())
              .forEach(container -> container.prependJvmOptions(jvmArgs));
}
/** Propagates the nodes-tag preload attribute, when present, to every container. */
private static void applyDefaultPreload(List<ApplicationContainer> containers, Element nodesElement) {
    if (nodesElement.hasAttribute(VespaDomBuilder.PRELOAD_ATTRIB_NAME)) {
        String preload = nodesElement.getAttribute(VespaDomBuilder.PRELOAD_ATTRIB_NAME);
        containers.forEach(container -> container.setPreLoad(preload));
    }
}
/** Adds the search execution factory and the search handler with its bindings/threadpool options. */
private void addSearchHandler(ApplicationContainerCluster cluster, Element searchElement) {
    var executionFactory = new ProcessingHandler<>(cluster.getSearch().getChains(),
                                                   "com.yahoo.search.searchchain.ExecutionFactory");
    cluster.addComponent(executionFactory);

    var threadpoolOptions = ContainerThreadpool.UserOptions.fromXml(searchElement).orElse(null);
    cluster.addComponent(new SearchHandler(cluster,
                                           serverBindings(searchElement, SearchHandler.DEFAULT_BINDING),
                                           threadpoolOptions));
}
/** Registers the query-builder GUI handler on its system binding path. */
private void addGUIHandler(ApplicationContainerCluster cluster) {
    Handler<?> handler = new GUIHandler();
    handler.addServerBindings(SystemBindingPattern.fromHttpPath(GUIHandler.BINDING_PATH));
    cluster.addComponent(handler);
}
/** Returns the user-configured bindings, or the given defaults when none are configured. */
private List<BindingPattern> serverBindings(Element searchElement, BindingPattern... defaultBindings) {
    List<Element> bindingElements = XML.getChildren(searchElement, "binding");
    return bindingElements.isEmpty() ? List.of(defaultBindings)
                                     : toBindingList(bindingElements);
}
/** Converts non-blank binding element texts to user binding patterns, skipping empty ones. */
private List<BindingPattern> toBindingList(List<Element> bindingElements) {
    return bindingElements.stream()
                          .map(element -> element.getTextContent().trim())
                          .filter(pattern -> !pattern.isEmpty())
                          .map(UserBindingPattern::fromPattern)
                          .collect(Collectors.toCollection(ArrayList::new));
}
/** Builds the document API from the {@code <document-api>} element, or returns null when absent. */
private ContainerDocumentApi buildDocumentApi(ApplicationContainerCluster cluster, Element spec) {
    Element documentApiElement = XML.getChild(spec, "document-api");
    if (documentApiElement == null) return null;

    return new ContainerDocumentApi(cluster, DocumentApiOptionsBuilder.build(documentApiElement));
}
/** Builds document processing from {@code <document-processing>}, or returns null when absent. */
private ContainerDocproc buildDocproc(DeployState deployState, ApplicationContainerCluster cluster, Element spec) {
    Element docprocElement = XML.getChild(spec, "document-processing");
    if (docprocElement == null) return null;

    addIncludes(docprocElement);
    DocprocChains chains = new DomDocprocChainsBuilder(null, false).build(deployState, cluster, docprocElement);
    // Message bus is enabled only for non-standalone containers (mirrors doBuild).
    return new ContainerDocproc(cluster, chains, DocprocOptionsBuilder.build(docprocElement), !standaloneBuilder);
}
/** Expands any {@code <include>} children of the given element; requires an application package. */
private void addIncludes(Element parentElement) {
    List<Element> includeElements = XML.getChildren(parentElement, IncludeDirs.INCLUDE);
    if (includeElements.isEmpty()) return;

    if (app == null)
        throw new IllegalArgumentException("Element <include> given in XML config, but no application package given.");

    includeElements.forEach(include -> addInclude(parentElement, include));
}
/** Imports all child elements of every XML file in the include directory into the parent element. */
private void addInclude(Element parentElement, Element include) {
    String dirName = include.getAttribute(IncludeDirs.DIR);
    app.validateIncludeDir(dirName);

    for (Element includedFile : Xml.allElemsFromPath(app, dirName)) {
        for (Element child : XML.getChildren(includedFile)) {
            // Nodes must be imported (deep copy) into the parent's document before appending.
            parentElement.appendChild(parentElement.getOwnerDocument().importNode(child, true));
        }
    }
}
/** Adds one component to the cluster for each child of {@code spec} named {@code componentName}. */
private static void addConfiguredComponents(DeployState deployState, ContainerCluster<? extends Container> cluster,
                                            Element spec, String componentName) {
    XML.getChildren(spec, componentName)
       .forEach(node -> cluster.addComponent(new DomComponentBuilder().build(deployState, cluster, node)));
}
/** Like addConfiguredComponents, but runs the given validator on each element before adding it. */
private static void validateAndAddConfiguredComponents(DeployState deployState,
                                                       ContainerCluster<? extends Container> cluster,
                                                       Element spec, String componentName,
                                                       Consumer<Element> elementValidator) {
    for (Element node : XML.getChildren(spec, componentName)) {
        elementValidator.accept(node); // May throw, aborting the build for invalid elements.
        cluster.addComponent(new DomComponentBuilder().build(deployState, cluster, node));
    }
}
/**
 * Registers an Athenz IdentityProvider component when the deployment spec declares an Athenz
 * domain, and tags each container with the identity domain/service as service properties.
 * No-op when the spec has no Athenz domain.
 */
private void addIdentityProvider(ApplicationContainerCluster cluster,
                                 List<ConfigServerSpec> configServerSpecs,
                                 HostName loadBalancerName,
                                 URI ztsUrl,
                                 String athenzDnsSuffix,
                                 Zone zone,
                                 DeploymentSpec spec) {
    spec.athenzDomain()
        .ifPresent(domain -> {
            // Service lookup order: instance-and-zone-specific service first, then the
            // spec-wide default; failing both is a configuration error.
            AthenzService service = spec.instance(app.getApplicationId().instance())
                    .flatMap(instanceSpec -> instanceSpec.athenzService(zone.environment(), zone.region()))
                    .or(() -> spec.athenzService())
                    .orElseThrow(() -> new RuntimeException("Missing Athenz service configuration in instance '" + app.getApplicationId().instance() + "'"));
            // DNS suffix is scoped per environment/region, e.g. "prod-us-east.<athenzDnsSuffix>".
            String zoneDnsSuffix = zone.environment().value() + "-" + zone.region().value() + "." + athenzDnsSuffix;
            IdentityProvider identityProvider = new IdentityProvider(domain, service, getLoadBalancerName(loadBalancerName, configServerSpecs), ztsUrl, zoneDnsSuffix, zone);
            cluster.addComponent(identityProvider);
            cluster.getContainers().forEach(container -> {
                container.setProp("identity.domain", domain.value());
                container.setProp("identity.service", service.value());
            });
        });
}
/**
 * Returns the given load balancer name when non-null; otherwise falls back to the first
 * config server's host name, or the literal "unknown" when there are none.
 */
private HostName getLoadBalancerName(HostName loadbalancerName, List<ConfigServerSpec> configServerSpecs) {
    if (loadbalancerName != null) return loadbalancerName;

    String fallback = configServerSpecs.stream()
                                       .findFirst()
                                       .map(ConfigServerSpec::getHostName)
                                       .orElse("unknown");
    return HostName.from(fallback);
}
/** Returns whether the container element declares a {@code <zookeeper>} child. */
private static boolean hasZooKeeper(Element spec) {
    Element zookeeperElement = XML.getChild(spec, "zookeeper");
    return zookeeperElement != null;
}
/** Disallow renderers named "XmlRenderer" or "JsonRenderer" */
private static void validateRendererElement(Element element) {
    String id = element.getAttribute("id");
    if (xmlRendererId.equals(id) || jsonRendererId.equals(id))
        throw new IllegalArgumentException(String.format("Renderer id %s is reserved for internal use", id));
}
/** Returns whether the element is a container services element (current or deprecated tag name). */
public static boolean isContainerTag(Element element) {
    String tagName = element.getTagName();
    return CONTAINER_TAG.equals(tagName) || DEPRECATED_CONTAINER_TAG.equals(tagName);
}
} | class ContainerModelBuilder extends ConfigModelBuilder<ContainerModel> {
// Default status file served by the hosted load-balancer status handler (see addStatusHandlers).
static final String HOSTED_VESPA_STATUS_FILE = Defaults.getDefaults().underVespaHome("var/vespa/load-balancer/status.html");
// Environment variable that, when set, overrides HOSTED_VESPA_STATUS_FILE.
private static final String HOSTED_VESPA_STATUS_FILE_SETTING = "VESPA_LB_STATUS_FILE";
private static final String CONTAINER_TAG = "container";
// Deprecated alias for the container tag; still accepted but warned about (see checkTagName).
private static final String DEPRECATED_CONTAINER_TAG = "jdisc";
private static final String ENVIRONMENT_VARIABLES_ELEMENT = "environment-variables";
// ZooKeeper requires an odd number of non-retired nodes within these bounds (see addZooKeeper).
private static final int MIN_ZOOKEEPER_NODE_COUNT = 1;
private static final int MAX_ZOOKEEPER_NODE_COUNT = 7;
// Controls whether the built containers open network (HTTP) servers (see the constructor).
public enum Networking { disable, enable }
private ApplicationPackage app;
private final boolean standaloneBuilder;
private final Networking networking;
private final boolean rpcServerEnabled;
private final boolean httpServerEnabled;
protected DeployLogger log;
// Config model ids handled by this builder: <container> and the deprecated <jdisc>.
public static final List<ConfigModelId> configModelIds =
        ImmutableList.of(ConfigModelId.fromName(CONTAINER_TAG), ConfigModelId.fromName(DEPRECATED_CONTAINER_TAG));
// Renderer ids reserved for internal use; user renderers may not reuse them (validateRendererElement).
private static final String xmlRendererId = RendererRegistry.xmlRendererId.getName();
private static final String jsonRendererId = RendererRegistry.jsonRendererId.getName();
/**
 * @param standaloneBuilder whether this builds a standalone container; standalone containers
 *                          get neither a message bus nor an rpc server (see doBuild)
 * @param networking        whether the built containers should open network (HTTP) servers
 */
public ContainerModelBuilder(boolean standaloneBuilder, Networking networking) {
    super(ContainerModel.class);
    this.standaloneBuilder = standaloneBuilder;
    this.networking = networking;
    this.rpcServerEnabled = !standaloneBuilder;
    this.httpServerEnabled = networking == Networking.enable;
}
/** Returns the config model ids ({@code <container>}, deprecated {@code <jdisc>}) this builder handles. */
@Override
public List<ConfigModelId> handlesElements() {
    return configModelIds;
}
/** Builds the container cluster for the given services element and attaches it to the model. */
@Override
public void doBuild(ContainerModel model, Element spec, ConfigModelContext modelContext) {
    // Stash the logger and application package for the many helper methods that need them.
    log = modelContext.getDeployLogger();
    app = modelContext.getApplicationPackage();
    checkVersion(spec);
    checkTagName(spec, log);
    ApplicationContainerCluster cluster = createContainerCluster(spec, modelContext);
    addClusterContent(cluster, spec, modelContext);
    // Standalone containers disable message bus and rpc; networking gates the HTTP server.
    cluster.setMessageBusEnabled(rpcServerEnabled);
    cluster.setRpcServerEnabled(rpcServerEnabled);
    cluster.setHttpServerEnabled(httpServerEnabled);
    model.setCluster(cluster);
}
/**
 * Creates the cluster through an anonymous DomConfigProducerBuilder so the standard
 * producer wiring (config ids, parent/child registration) is applied.
 */
private ApplicationContainerCluster createContainerCluster(Element spec, ConfigModelContext modelContext) {
    return new VespaDomBuilder.DomConfigProducerBuilder<ApplicationContainerCluster>() {
        @Override
        protected ApplicationContainerCluster doBuild(DeployState deployState, AbstractConfigProducer ancestor, Element producerSpec) {
            // The producer id is used both as the cluster's subId and its name.
            return new ApplicationContainerCluster(ancestor, modelContext.getProducerId(),
                                                   modelContext.getProducerId(), deployState);
        }
    }.build(modelContext.getDeployState(), modelContext.getParentProducer(), spec);
}
/**
 * Populates the cluster from the services XML: components, APIs, handlers, http, nodes, etc.
 * NOTE: the call order matters — e.g. addZooKeeper inspects cluster.getContainers(), so it
 * must run after addNodes.
 */
private void addClusterContent(ApplicationContainerCluster cluster, Element spec, ConfigModelContext context) {
    DeployState deployState = context.getDeployState();
    DocumentFactoryBuilder.buildDocumentFactories(cluster, spec);
    addConfiguredComponents(deployState, cluster, spec);
    addSecretStore(cluster, spec);
    addRestApis(deployState, spec, cluster);
    addServlets(deployState, spec, cluster);
    addModelEvaluation(spec, cluster, context);
    addProcessing(deployState, spec, cluster);
    addSearch(deployState, spec, cluster);
    addDocproc(deployState, spec, cluster);
    addDocumentApi(spec, cluster);
    cluster.addDefaultHandlersExceptStatus();
    addStatusHandlers(cluster, context.getDeployState().isHosted());
    addUserHandlers(deployState, cluster, spec);
    addHttp(deployState, spec, cluster, context);
    addAccessLogs(deployState, cluster, spec);
    addRoutingAliases(cluster, spec, deployState.zone().environment());
    addNodes(cluster, spec, context);
    addClientProviders(deployState, spec, cluster);
    addServerProviders(deployState, spec, cluster);
    addAthensCopperArgos(cluster, context);  // Must be added after nodes.
    addZooKeeper(cluster, spec);
    addParameterStoreValidationHandler(cluster, deployState);
}
/**
 * Adds ZooKeeper server/client components when the services XML declares {@code <zookeeper>}.
 * Validates that the cluster is not combined with a content cluster and that the non-retired
 * node count is an odd number within [MIN_ZOOKEEPER_NODE_COUNT, MAX_ZOOKEEPER_NODE_COUNT].
 */
private void addZooKeeper(ApplicationContainerCluster cluster, Element spec) {
    if (!hasZooKeeper(spec)) return;
    Element nodesElement = XML.getChild(spec, "nodes");
    // An 'of' attribute means the containers share hosts with a content cluster ("combined").
    boolean isCombined = nodesElement != null && nodesElement.hasAttribute("of");
    if (isCombined) {
        throw new IllegalArgumentException("A combined cluster cannot run ZooKeeper");
    }
    // Retired nodes do not participate in the ensemble, so only non-retired nodes count.
    long nonRetiredNodes = cluster.getContainers().stream().filter(c -> !c.isRetired()).count();
    if (nonRetiredNodes < MIN_ZOOKEEPER_NODE_COUNT || nonRetiredNodes > MAX_ZOOKEEPER_NODE_COUNT || nonRetiredNodes % 2 == 0) {
        throw new IllegalArgumentException("Cluster with ZooKeeper needs an odd number of nodes, between " +
                                           MIN_ZOOKEEPER_NODE_COUNT + " and " + MAX_ZOOKEEPER_NODE_COUNT +
                                           ", have " + nonRetiredNodes + " non-retired");
    }
    // Curator client for the cluster, plus per-container reconfigurable ZooKeeper servers.
    cluster.addSimpleComponent("com.yahoo.vespa.curator.Curator", null, "zkfacade");
    cluster.getContainers().forEach(ContainerModelBuilder::addReconfigurableZooKeeperServerComponents);
}
/** Adds the per-container components needed to run a reconfigurable ZooKeeper server. */
public static void addReconfigurableZooKeeperServerComponents(Container container) {
    for (String className : List.of("com.yahoo.vespa.zookeeper.ReconfigurableVespaZooKeeperServer",
                                    "com.yahoo.vespa.zookeeper.Reconfigurer",
                                    "com.yahoo.vespa.zookeeper.VespaZooKeeperAdminImpl")) {
        container.addComponent(zookeeperComponent(className, container));
    }
}
/** Creates a component from the zookeeper-server bundle, config-scoped to the given container. */
private static SimpleComponent zookeeperComponent(String idSpec, Container container) {
    return new SimpleComponent(new ComponentModel(idSpec, null, "zookeeper-server", container.getConfigId()));
}
/** Configures a secret store from {@code <secret-store>} with one entry per {@code <group>} child. */
private void addSecretStore(ApplicationContainerCluster cluster, Element spec) {
    Element secretStoreElement = XML.getChild(spec, "secret-store");
    if (secretStoreElement == null) return;

    SecretStore secretStore = new SecretStore();
    for (Element group : XML.getChildren(secretStoreElement, "group"))
        secretStore.addGroup(group.getAttribute("name"), group.getAttribute("environment"));
    cluster.setSecretStore(secretStore);
}
/**
 * In hosted Vespa, wires up the Athenz identity provider and rotation properties from the
 * application's deployment spec. No-op for self-hosted or when no deployment.xml exists.
 */
private void addAthensCopperArgos(ApplicationContainerCluster cluster, ConfigModelContext context) {
    if ( ! context.getDeployState().isHosted()) return;
    app.getDeployment().map(DeploymentSpec::fromXml)
       .ifPresent(deploymentSpec -> {
           addIdentityProvider(cluster,
                               context.getDeployState().getProperties().configServerSpecs(),
                               context.getDeployState().getProperties().loadBalancerName(),
                               context.getDeployState().getProperties().ztsUrl(),
                               context.getDeployState().getProperties().athenzDnsSuffix(),
                               context.getDeployState().zone(),
                               deploymentSpec);
           addRotationProperties(cluster, context.getDeployState().zone(), context.getDeployState().getEndpoints(), deploymentSpec);
       });
}
/** Sets rotation names and the activeRotation flag as service properties on each container. */
private void addRotationProperties(ApplicationContainerCluster cluster, Zone zone, Set<ContainerEndpoint> endpoints, DeploymentSpec spec) {
    for (var container : cluster.getContainers()) {
        setRotations(container, endpoints, cluster.getName());
        container.setProp("activeRotation", Boolean.toString(zoneHasActiveRotation(zone, spec)));
    }
}
/** Returns whether the deployment spec declares this zone as active for this application instance. */
private boolean zoneHasActiveRotation(Zone zone, DeploymentSpec spec) {
    return spec.instance(app.getApplicationId().instance())
               .map(instance -> instance.zones().stream()
                                        .anyMatch(declaredZone -> declaredZone.concerns(zone.environment(), Optional.of(zone.region()))
                                                                  && declaredZone.active()))
               .orElse(false);
}
/** Sets the comma-separated endpoint names of this container's cluster as the "rotations" property. */
private void setRotations(Container container, Set<ContainerEndpoint> endpoints, String containerClusterName) {
    var rotationNames = endpoints.stream()
                                 .filter(endpoint -> containerClusterName.equals(endpoint.clusterId()))
                                 .flatMap(endpoint -> endpoint.names().stream())
                                 .collect(Collectors.toUnmodifiableSet());
    container.setProp("rotations", String.join(",", rotationNames));
}
/**
 * Registers service/endpoint aliases from the {@code <aliases>} element. Only applies in prod.
 * NOTE(review): 'aliases' is null when no <aliases> child exists — this assumes
 * XML.getChildren tolerates a null parent and returns an empty list; confirm in XML util.
 */
private void addRoutingAliases(ApplicationContainerCluster cluster, Element spec, Environment environment) {
    if (environment != Environment.prod) return;

    Element aliases = XML.getChild(spec, "aliases");
    for (Element alias : XML.getChildren(aliases, "service-alias")) {
        cluster.serviceAliases().add(XML.getValue(alias));
    }
    for (Element alias : XML.getChildren(aliases, "endpoint-alias")) {
        cluster.endpointAliases().add(XML.getValue(alias));
    }
}
/** Adds {@code <component>} children of the spec itself and of each {@code <components>} group. */
private void addConfiguredComponents(DeployState deployState, ApplicationContainerCluster cluster, Element spec) {
    for (Element componentsElement : XML.getChildren(spec, "components")) {
        addIncludes(componentsElement);
        addConfiguredComponents(deployState, cluster, componentsElement, "component");
    }
    addConfiguredComponents(deployState, cluster, spec, "component");
}
/**
 * Hosted clusters serve /status.html from a file (path overridable through the
 * VESPA_LB_STATUS_FILE environment variable); self-hosted clusters get the VIP handler.
 */
protected void addStatusHandlers(ApplicationContainerCluster cluster, boolean isHostedVespa) {
    if (!isHostedVespa) {
        cluster.addVipHandler();
        return;
    }
    String name = "status.html";
    String statusFile = Optional.ofNullable(System.getenv(HOSTED_VESPA_STATUS_FILE_SETTING))
                                .orElse(HOSTED_VESPA_STATUS_FILE);
    cluster.addComponent(new FileStatusHandlerComponent(name + "-status-handler",
                                                        statusFile,
                                                        SystemBindingPattern.fromHttpPath("/" + name)));
}
/** Adds one client provider component per {@code <client>} child of the spec. */
private void addClientProviders(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
    XML.getChildren(spec, "client")
       .forEach(clientSpec -> cluster.addComponent(new DomClientProviderBuilder(cluster).build(deployState, cluster, clientSpec)));
}
/** Adds one server provider component per {@code <server>} child of the spec. */
private void addServerProviders(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
    addConfiguredComponents(deployState, cluster, spec, "server");
}
/**
 * Adds access log components from {@code <accesslog>} elements; falls back to the default
 * search access log when none are configured and access logging is enabled by default.
 * A connection log is added whenever at least one access log component ends up on the cluster.
 */
protected void addAccessLogs(DeployState deployState, ApplicationContainerCluster cluster, Element spec) {
    List<Element> accessLogElements = getAccessLogElements(spec);

    for (Element accessLog : accessLogElements) {
        // buildIfNotDisabled returns empty for explicitly disabled access logs.
        AccessLogBuilder.buildIfNotDisabled(deployState, cluster, accessLog).ifPresent(cluster::addComponent);
    }

    if (accessLogElements.isEmpty() && deployState.getAccessLoggingEnabledByDefault())
        cluster.addDefaultSearchAccessLog();

    // Add connection log if access log is configured (checks the resulting components,
    // not the XML, so the disabled/default cases are handled uniformly).
    if (cluster.getAllComponents().stream().anyMatch(component -> component instanceof AccessLogComponent)) {
        cluster.addComponent(new ConnectionLogComponent(cluster, FileConnectionLog.class, "qrs"));
    }
}
/** Returns all {@code <accesslog>} children of the spec (possibly empty). */
private List<Element> getAccessLogElements(Element spec) {
    return XML.getChildren(spec, "accesslog");
}
/**
 * Builds the http subtree from {@code <http>} when present; hosted tenant applications
 * additionally get an implicit http server, implicit access control, and the extra hosted
 * TLS connector. Order matters: the implicit http server must exist before the connector
 * and filter bindings are added.
 */
protected void addHttp(DeployState deployState, Element spec, ApplicationContainerCluster cluster, ConfigModelContext context) {
    Element httpElement = XML.getChild(spec, "http");
    if (httpElement != null) {
        cluster.setHttp(buildHttp(deployState, cluster, httpElement));
    }
    if (isHostedTenantApplication(context)) {
        addHostedImplicitHttpIfNotPresent(cluster);
        addHostedImplicitAccessControlIfNotPresent(deployState, cluster);
        addDefaultConnectorHostedFilterBinding(cluster);
        addAdditionalHostedConnector(deployState, cluster, context);
    }
}
/**
 * Lets a configured access control chain also cover the default hosted connector.
 * No-op when the cluster has no access control.
 */
private void addDefaultConnectorHostedFilterBinding(ApplicationContainerCluster cluster) {
    // Fixed: removed a stray empty statement (double semicolon) at the end of this call.
    cluster.getHttp().getAccessControl()
           .ifPresent(accessControl -> accessControl.configureDefaultHostedConnector(cluster.getHttp()));
}
private void addAdditionalHostedConnector(DeployState deployState, ApplicationContainerCluster cluster, ConfigModelContext context) {
JettyHttpServer server = cluster.getHttp().getHttpServer().get();
String serverName = server.getComponentId().getName();
HostedSslConnectorFactory connectorFactory;
if (deployState.endpointCertificateSecrets().isPresent()) {
boolean authorizeClient = deployState.zone().system().isPublic();
if (authorizeClient && deployState.tlsClientAuthority().isEmpty()) {
throw new RuntimeException("Client certificate authority security/clients.pem is missing - see: https:
}
EndpointCertificateSecrets endpointCertificateSecrets = deployState.endpointCertificateSecrets().get();
boolean enforceHandshakeClientAuth = context.properties().featureFlags().useAccessControlTlsHandshakeClientAuth() &&
cluster.getHttp().getAccessControl()
.map(accessControl -> accessControl.clientAuthentication)
.map(clientAuth -> clientAuth.equals(AccessControl.ClientAuthentication.need))
.orElse(false);
connectorFactory = authorizeClient
? HostedSslConnectorFactory.withProvidedCertificateAndTruststore(serverName, endpointCertificateSecrets, deployState.tlsClientAuthority().get())
: HostedSslConnectorFactory.withProvidedCertificate(serverName, endpointCertificateSecrets, enforceHandshakeClientAuth);
} else {
connectorFactory = HostedSslConnectorFactory.withDefaultCertificateAndTruststore(serverName);
}
cluster.getHttp().getAccessControl().ifPresent(accessControl -> accessControl.configureHostedConnector(connectorFactory));
server.addConnector(connectorFactory);
}
/** Returns whether this is a hosted, default-type, non-tester application. */
private static boolean isHostedTenantApplication(ConfigModelContext context) {
    var deployState = context.getDeployState();
    if ( ! deployState.isHosted()) return false;
    if (context.getApplicationType() != ApplicationType.DEFAULT) return false;
    // Tester instances (deployment test runners) are excluded.
    return ! deployState.getProperties().applicationId().instance().isTester();
}
/**
 * Ensures the cluster has an http subtree, an http server, and a connector on the default
 * Vespa web service port — creating each piece only when missing, so user configuration
 * is preserved.
 */
private static void addHostedImplicitHttpIfNotPresent(ApplicationContainerCluster cluster) {
    if(cluster.getHttp() == null) {
        cluster.setHttp(new Http(new FilterChains(cluster)));
    }
    JettyHttpServer httpServer = cluster.getHttp().getHttpServer().orElse(null);
    if (httpServer == null) {
        httpServer = new JettyHttpServer(new ComponentId("DefaultHttpServer"), cluster, cluster.isHostedVespa());
        cluster.getHttp().setHttpServer(httpServer);
    }
    // Only add the default connector if no user connector already listens on the default port.
    int defaultPort = Defaults.getDefaults().vespaWebServicePort();
    boolean defaultConnectorPresent = httpServer.getConnectorFactories().stream().anyMatch(connector -> connector.getListenPort() == defaultPort);
    if (!defaultConnectorPresent) {
        httpServer.addConnector(new ConnectorFactory.Builder("SearchServer", defaultPort).build());
    }
}
/**
 * Adds a default-deny access control (read and write disabled, client auth required) for
 * the tenant's Athenz domain, unless the user configured access control themselves or no
 * Athenz domain is set.
 */
private void addHostedImplicitAccessControlIfNotPresent(DeployState deployState, ApplicationContainerCluster cluster) {
    Http http = cluster.getHttp();
    if (http.getAccessControl().isPresent()) return; // access control added explicitly
    AthenzDomain tenantDomain = deployState.getProperties().athenzDomain().orElse(null);
    if (tenantDomain == null) return; // tenant domain not present, cannot add access control
    new AccessControl.Builder(tenantDomain.value())
            .setHandlers(cluster)
            .readEnabled(false)
            .writeEnabled(false)
            .clientAuthentication(AccessControl.ClientAuthentication.need)
            .build()
            .configureHttpFilterChains(http);
}
/** Builds the http subtree; all servers are removed when networking is disabled. */
private Http buildHttp(DeployState deployState, ApplicationContainerCluster cluster, Element httpElement) {
    Http http = new HttpBuilder().build(deployState, cluster, httpElement);
    if (networking == Networking.disable) http.removeAllServers();
    return http;
}
/** Adds one REST API per {@code <rest-api>} child of the spec. */
private void addRestApis(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
    XML.getChildren(spec, "rest-api")
       .forEach(restApiElem -> cluster.addRestApi(new RestApiBuilder().build(deployState, cluster, restApiElem)));
}
/** Adds one servlet per {@code <servlet>} child of the spec. */
private void addServlets(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
    XML.getChildren(spec, "servlet")
       .forEach(servletElem -> cluster.addServlet(new ServletBuilder().build(deployState, cluster, servletElem)));
}
/** Sets the document API on the cluster when {@code <document-api>} is configured. */
private void addDocumentApi(Element spec, ApplicationContainerCluster cluster) {
    ContainerDocumentApi documentApi = buildDocumentApi(cluster, spec);
    if (documentApi != null)
        cluster.setDocumentApi(documentApi);
}
/**
 * Sets document processing on the cluster when {@code <document-processing>} is configured,
 * and derives the cluster's message bus parameters from the docproc options.
 */
private void addDocproc(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
    ContainerDocproc containerDocproc = buildDocproc(deployState, cluster, spec);
    if (containerDocproc == null) return;
    cluster.setDocproc(containerDocproc);

    ContainerDocproc.Options docprocOptions = containerDocproc.options;
    cluster.setMbusParams(new ApplicationContainerCluster.MbusParams(
            docprocOptions.maxConcurrentFactor, docprocOptions.documentExpansionFactor, docprocOptions.containerCoreMemory));
}
/**
 * Configures search from the {@code <search>} element: search chains, the search and GUI
 * handlers, and user renderers (validated against the reserved renderer ids).
 * No-op when the element is absent.
 */
private void addSearch(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
    Element searchElement = XML.getChild(spec, "search");
    if (searchElement == null) return;

    addIncludes(searchElement);
    cluster.setSearch(buildSearch(deployState, cluster, searchElement));

    // addSearchHandler reads cluster.getSearch(), so setSearch must happen first.
    addSearchHandler(cluster, searchElement);
    addGUIHandler(cluster);
    validateAndAddConfiguredComponents(deployState, cluster, searchElement, "renderer", ContainerModelBuilder::validateRendererElement);
}
/** Enables stateless model evaluation when {@code <model-evaluation>} is present. */
private void addModelEvaluation(Element spec, ApplicationContainerCluster cluster, ConfigModelContext context) {
    if (XML.getChild(spec, "model-evaluation") == null) return;

    RankProfileList profiles = (context.vespaModel() == null) ? RankProfileList.empty
                                                              : context.vespaModel().rankProfileList();
    cluster.setModelEvaluation(new ContainerModelEvaluation(cluster, profiles));
}
/**
 * Configures processing chains from the {@code <processing>} element, including its server
 * bindings and user renderers (validated against the reserved renderer ids).
 * No-op when the element is absent.
 */
private void addProcessing(DeployState deployState, Element spec, ApplicationContainerCluster cluster) {
    Element processingElement = XML.getChild(spec, "processing");
    if (processingElement == null) return;

    addIncludes(processingElement);
    cluster.setProcessingChains(new DomProcessingBuilder(null).build(deployState, cluster, processingElement),
                                serverBindings(processingElement, ProcessingChains.defaultBindings).toArray(BindingPattern[]::new));
    validateAndAddConfiguredComponents(deployState, cluster, processingElement, "renderer", ContainerModelBuilder::validateRendererElement);
}
/**
 * Builds the search subsystem: search chains from XML, page templates from the application
 * package, and query profiles / semantic rules from the deploy state.
 */
private ContainerSearch buildSearch(DeployState deployState, ApplicationContainerCluster containerCluster, Element producerSpec) {
    SearchChains searchChains = new DomSearchChainsBuilder(null, false)
            .build(deployState, containerCluster, producerSpec);

    ContainerSearch containerSearch = new ContainerSearch(containerCluster, searchChains, new ContainerSearch.Options());

    applyApplicationPackageDirectoryConfigs(deployState.getApplicationPackage(), containerSearch);
    containerSearch.setQueryProfiles(deployState.getQueryProfiles());
    containerSearch.setSemanticRules(deployState.getSemanticRules());

    return containerSearch;
}
/** Validates the application package's page templates, then installs them on the search setup. */
private void applyApplicationPackageDirectoryConfigs(ApplicationPackage applicationPackage,ContainerSearch containerSearch) {
    // Validation must run before create so errors surface as deployment failures.
    PageTemplates.validate(applicationPackage);
    containerSearch.setPageTemplates(PageTemplates.create(applicationPackage));
}
/** Adds one handler component per {@code <handler>} child of the spec. */
private void addUserHandlers(DeployState deployState, ApplicationContainerCluster cluster, Element spec) {
    XML.getChildren(spec, "handler")
       .forEach(component -> cluster.addComponent(new DomHandlerBuilder(cluster).build(deployState, cluster, component)));
}
/**
 * Throws unless the services element declares version 1 (the exact accepted string forms
 * depend on Version.fromString — presumably "1.0" and equivalents; confirm in Version).
 */
private void checkVersion(Element spec) {
    String version = spec.getAttribute("version");

    if ( ! Version.fromString(version).equals(new Version(1))) {
        throw new RuntimeException("Expected container version to be 1.0, but got " + version);
    }
}
/** Logs a deprecation warning when the old {@code <jdisc>} tag name is used. */
private void checkTagName(Element spec, DeployLogger logger) {
    if ( ! spec.getTagName().equals(DEPRECATED_CONTAINER_TAG)) return;
    logger.log(WARNING, "'" + DEPRECATED_CONTAINER_TAG + "' is deprecated as tag name. Use '" + CONTAINER_TAG + "' instead.");
}
/** Adds nodes: a single standalone node for standalone builds, otherwise from the XML spec. */
private void addNodes(ApplicationContainerCluster cluster, Element spec, ConfigModelContext context) {
    if (standaloneBuilder) {
        addStandaloneNode(cluster);
    } else {
        addNodesFromXml(cluster, spec, context);
    }
}
/** Adds a single container named "standalone", indexed after any existing containers. */
private void addStandaloneNode(ApplicationContainerCluster cluster) {
    ApplicationContainer container = new ApplicationContainer(cluster, "standalone",
                                                              cluster.getContainers().size(),
                                                              cluster.isHostedVespa());
    cluster.addContainers(Collections.singleton(container));
}
/**
 * Returns whether the given jvmargs contain GC-related flags (an explicit GC algorithm
 * selection or any CMS tuning flag) that belong in 'jvm-gc-options' instead.
 */
static boolean incompatibleGCOptions(String jvmargs) {
    Pattern algorithmFlag = Pattern.compile("-XX:[-+]Use.+GC");
    Pattern cmsFlag = Pattern.compile("-XX:[-+]*CMS");
    return algorithmFlag.matcher(jvmargs).find() || cmsFlag.matcher(jvmargs).find();
}
/**
 * Resolves the JVM GC options: explicit options win, then the deploy-state properties,
 * then the environment default (CMS for hosted, G1 otherwise).
 */
private static String buildJvmGCOptions(DeployState deployState, String jvmGCOptions) {
    String requested = (jvmGCOptions != null) ? jvmGCOptions
                                              : deployState.getProperties().jvmGCOptions();
    if (requested == null || requested.isEmpty())
        return deployState.isHosted() ? ContainerCluster.CMS : ContainerCluster.G1GC;
    return requested;
}
/**
 * Reads JVM options from the nodes element, supporting both 'jvm-options' and the deprecated
 * 'jvmargs' attribute. Specifying both is an error. When deprecated jvmargs contain GC flags,
 * the cluster's GC options are reset to G1 and a warning is logged.
 */
private static String getJvmOptions(ApplicationContainerCluster cluster, Element nodesElement, DeployLogger deployLogger) {
    String jvmOptions;
    if (nodesElement.hasAttribute(VespaDomBuilder.JVM_OPTIONS)) {
        jvmOptions = nodesElement.getAttribute(VespaDomBuilder.JVM_OPTIONS);
        if (nodesElement.hasAttribute(VespaDomBuilder.JVMARGS_ATTRIB_NAME)) {
            // Mixing the new and the deprecated attribute is ambiguous — reject it.
            String jvmArgs = nodesElement.getAttribute(VespaDomBuilder.JVMARGS_ATTRIB_NAME);
            throw new IllegalArgumentException("You have specified both jvm-options='" + jvmOptions + "'" +
                                               " and deprecated jvmargs='" + jvmArgs + "'. Merge jvmargs into jvm-options.");
        }
    } else {
        jvmOptions = nodesElement.getAttribute(VespaDomBuilder.JVMARGS_ATTRIB_NAME);
        if (incompatibleGCOptions(jvmOptions)) {
            deployLogger.log(WARNING, "You need to move out your GC related options from 'jvmargs' to 'jvm-gc-options'");
            cluster.setJvmGCOptions(ContainerCluster.G1GC);
        }
    }
    return jvmOptions;
}
/** Returns the attribute value, or null when the attribute is absent (DOM would return ""). */
private static String extractAttribute(Element element, String attrName) {
    if ( ! element.hasAttribute(attrName)) return null;
    return element.getAttribute(attrName);
}
/**
 * Applies JVM settings given as attributes directly on the legacy {@code <nodes>} tag:
 * jvm options/args, GC options (only when not already set on the cluster), and memory percentage.
 */
void extractJvmFromLegacyNodesTag(List<ApplicationContainer> nodes, ApplicationContainerCluster cluster,
                                  Element nodesElement, ConfigModelContext context) {
    applyNodesTagJvmArgs(nodes, getJvmOptions(cluster, nodesElement, context.getDeployLogger()));

    // getJvmOptions may already have set GC options (deprecated jvmargs with GC flags);
    // don't overwrite them here.
    if (cluster.getJvmGCOptions().isEmpty()) {
        String jvmGCOptions = extractAttribute(nodesElement, VespaDomBuilder.JVM_GC_OPTIONS);
        cluster.setJvmGCOptions(buildJvmGCOptions(context.getDeployState(), jvmGCOptions));
    }

    applyMemoryPercentage(cluster, nodesElement.getAttribute(VespaDomBuilder.Allocated_MEMORY_ATTRIB_NAME));
}
/**
 * Applies JVM settings given on the dedicated {@code <jvm>} element: options, allocated
 * memory percentage, and GC options (with environment defaults when unset).
 */
void extractJvmTag(List<ApplicationContainer> nodes, ApplicationContainerCluster cluster,
                   Element jvmElement, ConfigModelContext context) {
    applyNodesTagJvmArgs(nodes, jvmElement.getAttribute(VespaDomBuilder.OPTIONS));
    applyMemoryPercentage(cluster, jvmElement.getAttribute(VespaDomBuilder.Allocated_MEMORY_ATTRIB_NAME));
    String jvmGCOptions = extractAttribute(jvmElement, VespaDomBuilder.GC_OPTIONS);
    cluster.setJvmGCOptions(buildJvmGCOptions(context.getDeployState(), jvmGCOptions));
}
/**
 * Add nodes to cluster according to the given containerElement.
 *
 * Note: DO NOT change allocation behaviour to allow version X and Y of the config-model to allocate a different set
 * of nodes. Such changes must be guarded by a common condition (e.g. feature flag) so the behaviour can be changed
 * simultaneously for all active config models.
 */
private void addNodesFromXml(ApplicationContainerCluster cluster, Element containerElement, ConfigModelContext context) {
    Element nodesElement = XML.getChild(containerElement, "nodes");
    if (nodesElement == null) {
        // No <nodes> tag: allocation is fully implicit.
        cluster.addContainers(allocateWithoutNodesTag(cluster, context));
    } else {
        List<ApplicationContainer> nodes = createNodes(cluster, containerElement, nodesElement, context);

        // JVM settings come from <jvm> when present, otherwise from legacy <nodes> attributes.
        Element jvmElement = XML.getChild(nodesElement, "jvm");
        if (jvmElement == null) {
            extractJvmFromLegacyNodesTag(nodes, cluster, nodesElement, context);
        } else {
            extractJvmTag(nodes, cluster, jvmElement, context);
        }
        applyRoutingAliasProperties(nodes, cluster);
        applyDefaultPreload(nodes, nodesElement);
        String environmentVars = getEnvironmentVariables(XML.getChild(nodesElement, ENVIRONMENT_VARIABLES_ELEMENT));
        if (!environmentVars.isEmpty()) {
            cluster.setEnvironmentVars(environmentVars);
        }
        if (useCpuSocketAffinity(nodesElement))
            AbstractService.distributeCpuSocketAffinity(nodes);

        cluster.addContainers(nodes);
    }
}
/**
 * Serializes {@code <environment-variables>} children as space-separated NAME=value pairs
 * (with a trailing space); returns "" when the element is absent.
 */
private static String getEnvironmentVariables(Element environmentVariables) {
    if (environmentVariables == null) return "";

    StringBuilder serialized = new StringBuilder();
    for (Element variable : XML.getChildren(environmentVariables))
        serialized.append(variable.getNodeName()).append('=').append(variable.getTextContent()).append(' ');
    return serialized.toString();
}
/**
 * Dispatches node creation on the nodes element's attributes: 'type' (node type allocation),
 * 'of' (combined with a content cluster), 'count' (count-based provisioning), or explicit
 * <node> children. Hosted manual environments are provisioned like count-based clusters.
 * See the allocation-behaviour warning on addNodesFromXml before changing this dispatch.
 */
private List<ApplicationContainer> createNodes(ApplicationContainerCluster cluster, Element containerElement, Element nodesElement, ConfigModelContext context) {
    if (nodesElement.hasAttribute("type")) // internal use for hosted system infrastructure nodes
        return createNodesFromNodeType(cluster, nodesElement, context);
    else if (nodesElement.hasAttribute("of")) // hosted node sharing with a content cluster
        return createNodesFromContentServiceReference(cluster, nodesElement, context);
    else if (nodesElement.hasAttribute("count")) // regular, hosted node spec
        return createNodesFromNodeCount(cluster, containerElement, nodesElement, context);
    else if (cluster.isHostedVespa() && cluster.getZone().environment().isManuallyDeployed()) // default to 1 in manual zones
        return createNodesFromNodeCount(cluster, containerElement, nodesElement, context);
    else // the non-hosted option: <node> children
        return createNodesFromNodeList(context.getDeployState(), cluster, nodesElement);
}
/**
 * Propagates the cluster's service and endpoint aliases to each container as the
 * comma-separated "servicealiases"/"endpointaliases" service properties.
 * Fix: the joined string was rebuilt (via a stream collector) once per container;
 * it is now computed once with String.join and reused.
 */
private static void applyRoutingAliasProperties(List<ApplicationContainer> result, ApplicationContainerCluster cluster) {
    if ( ! cluster.serviceAliases().isEmpty()) {
        String serviceAliases = String.join(",", cluster.serviceAliases());
        result.forEach(container -> container.setProp("servicealiases", serviceAliases));
    }
    if ( ! cluster.endpointAliases().isEmpty()) {
        String endpointAliases = String.join(",", cluster.endpointAliases());
        result.forEach(container -> container.setProp("endpointaliases", endpointAliases));
    }
}
/**
 * Parses a memory percentage string such as "60%" and applies it to the cluster.
 * Null/empty input is a no-op. Throws IllegalArgumentException when the value is
 * not an integer percentage ending with '%'.
 * Fixes: the identical error message was built in two places (now built once), and
 * the NumberFormatException is preserved as the cause instead of being dropped.
 */
private static void applyMemoryPercentage(ApplicationContainerCluster cluster, String memoryPercentage) {
    if (memoryPercentage == null || memoryPercentage.isEmpty()) return;
    String errorMessage = "The memory percentage given for nodes in " + cluster +
                          " must be an integer percentage ending by the '%' sign";
    memoryPercentage = memoryPercentage.trim();
    if ( ! memoryPercentage.endsWith("%"))
        throw new IllegalArgumentException(errorMessage);
    memoryPercentage = memoryPercentage.substring(0, memoryPercentage.length() - 1).trim();
    try {
        cluster.setMemoryPercentage(Integer.parseInt(memoryPercentage));
    }
    catch (NumberFormatException e) {
        throw new IllegalArgumentException(errorMessage, e);
    }
}
/**
 * Allocate a container cluster without a nodes tag.
 * Hosted: requests a small default cluster (2 nodes in production, 1 elsewhere) with
 * unspecified resources; capacity is not required while bootstrapping.
 * Self-hosted: a single container on the single-node host.
 */
private List<ApplicationContainer> allocateWithoutNodesTag(ApplicationContainerCluster cluster, ConfigModelContext context) {
    DeployState deployState = context.getDeployState();
    HostSystem hostSystem = cluster.hostSystem();
    if (deployState.isHosted()) {
        ClusterSpec clusterSpec = ClusterSpec.request(ClusterSpec.Type.container,
                                                      ClusterSpec.Id.from(cluster.getName()))
                .vespaVersion(deployState.getWantedNodeVespaVersion())
                .dockerImageRepository(deployState.getWantedDockerImageRepo())
                .build();
        // Production gets redundancy by default; other environments a single node
        int nodeCount = deployState.zone().environment().isProduction() ? 2 : 1;
        Capacity capacity = Capacity.from(new ClusterResources(nodeCount, 1, NodeResources.unspecified()),
                                          false,
                                          !deployState.getProperties().isBootstrap());
        var hosts = hostSystem.allocateHosts(clusterSpec, capacity, log);
        return createNodesFromHosts(log, hosts, cluster);
    }
    else {
        return singleHostContainerCluster(cluster, hostSystem.getHost(Container.SINGLENODE_CONTAINER_SERVICESPEC), context);
    }
}
/** A self-hosted cluster without a nodes tag is a single container on the given host. */
private List<ApplicationContainer> singleHostContainerCluster(ApplicationContainerCluster cluster, HostResource host, ConfigModelContext context) {
    ApplicationContainer container = new ApplicationContainer(cluster, "container.0", 0, cluster.isHostedVespa());
    container.setHostResource(host);
    container.initService(context.getDeployLogger());
    return List.of(container);
}
// Provisions nodes for this cluster from a count-based <nodes> specification.
// Whether the cluster also runs an embedded ZooKeeper (<zookeeper> tag) is passed to
// provisioning so nodes can be placed accordingly.
private List<ApplicationContainer> createNodesFromNodeCount(ApplicationContainerCluster cluster, Element containerElement, Element nodesElement, ConfigModelContext context) {
    NodesSpecification nodesSpecification = NodesSpecification.from(new ModelElement(nodesElement), context);
    Map<HostResource, ClusterMembership> hosts = nodesSpecification.provision(cluster.getRoot().hostSystem(),
                                                                              ClusterSpec.Type.container,
                                                                              ClusterSpec.Id.from(cluster.getName()),
                                                                              log,
                                                                              hasZooKeeper(containerElement));
    return createNodesFromHosts(context.getDeployLogger(), hosts, cluster);
}
// Allocates all hosts of the node type named by the 'type' attribute to this cluster.
// Used for infrastructure clusters rather than regular application nodes.
private List<ApplicationContainer> createNodesFromNodeType(ApplicationContainerCluster cluster, Element nodesElement, ConfigModelContext context) {
    NodeType type = NodeType.valueOf(nodesElement.getAttribute("type"));
    ClusterSpec clusterSpec = ClusterSpec.request(ClusterSpec.Type.container, ClusterSpec.Id.from(cluster.getName()))
            .vespaVersion(context.getDeployState().getWantedNodeVespaVersion())
            .dockerImageRepository(context.getDeployState().getWantedDockerImageRepo())
            .build();
    Map<HostResource, ClusterMembership> hosts =
            cluster.getRoot().hostSystem().allocateHosts(clusterSpec,
                                                         Capacity.fromRequiredNodeType(type), log);
    return createNodesFromHosts(context.getDeployLogger(), hosts, cluster);
}
// Co-locates this container cluster on the nodes of the content cluster referenced
// by the 'of' attribute.
private List<ApplicationContainer> createNodesFromContentServiceReference(ApplicationContainerCluster cluster, Element nodesElement, ConfigModelContext context) {
    NodesSpecification nodeSpecification;
    try {
        nodeSpecification = NodesSpecification.from(new ModelElement(nodesElement), context);
    } catch (IllegalArgumentException e) {
        // Rewrap to identify which cluster holds the bad reference; original kept as cause
        throw new IllegalArgumentException(cluster + " contains an invalid reference", e);
    }
    String referenceId = nodesElement.getAttribute("of");
    cluster.setHostClusterId(referenceId);   // remember which content cluster hosts us
    Map<HostResource, ClusterMembership> hosts =
            StorageGroup.provisionHosts(nodeSpecification,
                                        referenceId,
                                        cluster.getRoot().hostSystem(),
                                        context.getDeployLogger());
    return createNodesFromHosts(context.getDeployLogger(), hosts, cluster);
}
/**
 * Materializes one ApplicationContainer per allocated host, named "container.&lt;index&gt;"
 * after its cluster membership index, bound to its host and initialized.
 */
private List<ApplicationContainer> createNodesFromHosts(DeployLogger deployLogger, Map<HostResource, ClusterMembership> hosts, ApplicationContainerCluster cluster) {
    List<ApplicationContainer> containers = new ArrayList<>();
    hosts.forEach((host, membership) -> {
        ApplicationContainer container = new ApplicationContainer(cluster, "container." + membership.index(),
                                                                  membership.retired(), membership.index(),
                                                                  cluster.isHostedVespa());
        container.setHostResource(host);
        container.initService(deployLogger);
        containers.add(container);
    });
    return containers;
}
/** Builds one container per explicit <node> child, indexed in document order. */
private List<ApplicationContainer> createNodesFromNodeList(DeployState deployState, ApplicationContainerCluster cluster, Element nodesElement) {
    List<Element> nodeElements = XML.getChildren(nodesElement, "node");
    List<ApplicationContainer> nodes = new ArrayList<>(nodeElements.size());
    for (int index = 0; index < nodeElements.size(); index++)
        nodes.add(new ContainerServiceBuilder("container." + index, index)
                          .build(deployState, cluster, nodeElements.get(index)));
    return nodes;
}
/** True when the <nodes> tag explicitly sets the CPU socket affinity attribute to true. */
private static boolean useCpuSocketAffinity(Element nodesElement) {
    return nodesElement.hasAttribute(VespaDomBuilder.CPU_SOCKET_AFFINITY_ATTRIB_NAME)
           && Boolean.parseBoolean(nodesElement.getAttribute(VespaDomBuilder.CPU_SOCKET_AFFINITY_ATTRIB_NAME));
}
/** Applies the nodes-tag JVM arguments only to containers without their own JVM options. */
private static void applyNodesTagJvmArgs(List<ApplicationContainer> containers, String jvmArgs) {
    containers.stream()
              .filter(container -> container.getAssignedJvmOptions().isEmpty())
              .forEach(container -> container.prependJvmOptions(jvmArgs));
}
/** Sets the preload library on every container when the <nodes> tag declares one. */
private static void applyDefaultPreload(List<ApplicationContainer> containers, Element nodesElement) {
    if (nodesElement.hasAttribute(VespaDomBuilder.PRELOAD_ATTRIB_NAME)) {
        String preload = nodesElement.getAttribute(VespaDomBuilder.PRELOAD_ATTRIB_NAME);
        for (Container container : containers)
            container.setPreLoad(preload);
    }
}
// Registers the search-chains execution factory and the search handler, bound either
// to the default binding or to the <binding> elements given under <search>.
private void addSearchHandler(ApplicationContainerCluster cluster, Element searchElement) {
    cluster.addComponent(new ProcessingHandler<>(cluster.getSearch().getChains(),
                                                 "com.yahoo.search.searchchain.ExecutionFactory"));
    cluster.addComponent(
            new SearchHandler(
                    cluster,
                    serverBindings(searchElement, SearchHandler.DEFAULT_BINDING),
                    ContainerThreadpool.UserOptions.fromXml(searchElement).orElse(null)));
}
/** Registers the query-builder GUI handler on its well-known system binding path. */
private void addGUIHandler(ApplicationContainerCluster cluster) {
    Handler<?> handler = new GUIHandler();
    handler.addServerBindings(SystemBindingPattern.fromHttpPath(GUIHandler.BINDING_PATH));
    cluster.addComponent(handler);
}
/** Returns the user-supplied <binding> patterns, or the given defaults when none are declared. */
private List<BindingPattern> serverBindings(Element searchElement, BindingPattern... defaultBindings) {
    List<Element> customBindings = XML.getChildren(searchElement, "binding");
    return customBindings.isEmpty() ? List.of(defaultBindings) : toBindingList(customBindings);
}
/** Converts non-blank <binding> texts into user binding patterns; blank elements are skipped. */
private List<BindingPattern> toBindingList(List<Element> bindingElements) {
    List<BindingPattern> patterns = new ArrayList<>();
    for (Element bindingElement : bindingElements) {
        String pattern = bindingElement.getTextContent().trim();
        if (pattern.isEmpty()) continue;
        patterns.add(UserBindingPattern.fromPattern(pattern));
    }
    return patterns;
}
/** Builds the document API for this cluster, or returns null when <document-api> is absent. */
private ContainerDocumentApi buildDocumentApi(ApplicationContainerCluster cluster, Element spec) {
    Element documentApiElement = XML.getChild(spec, "document-api");
    return documentApiElement == null
           ? null
           : new ContainerDocumentApi(cluster, DocumentApiOptionsBuilder.build(documentApiElement));
}
// Builds document processing for this cluster, or returns null when no
// <document-processing> element is present. Includes are expanded into the DOM
// *before* the chains are parsed, so included chain definitions are picked up.
private ContainerDocproc buildDocproc(DeployState deployState, ApplicationContainerCluster cluster, Element spec) {
    Element docprocElement = XML.getChild(spec, "document-processing");
    if (docprocElement == null)
        return null;
    addIncludes(docprocElement);
    DocprocChains chains = new DomDocprocChainsBuilder(null, false).build(deployState, cluster, docprocElement);
    ContainerDocproc.Options docprocOptions = DocprocOptionsBuilder.build(docprocElement);
    return new ContainerDocproc(cluster, chains, docprocOptions, !standaloneBuilder);
}
/**
 * Expands every <include> child of the given element in place. Requires an application
 * package to resolve the included directories; fails fast when none is available.
 */
private void addIncludes(Element parentElement) {
    List<Element> includes = XML.getChildren(parentElement, IncludeDirs.INCLUDE);
    if (includes.isEmpty()) return;
    if (app == null)
        throw new IllegalArgumentException("Element <include> given in XML config, but no application package given.");
    includes.forEach(include -> addInclude(parentElement, include));
}
// Expands one <include dir="..."/>: every child element of every XML file in the
// directory is deep-copied into this document and appended to parentElement.
private void addInclude(Element parentElement, Element include) {
    String dirName = include.getAttribute(IncludeDirs.DIR);
    app.validateIncludeDir(dirName);   // rejects directories outside the application package
    List<Element> includedFiles = Xml.allElemsFromPath(app, dirName);
    for (Element includedFile : includedFiles) {
        List<Element> includedSubElements = XML.getChildren(includedFile);
        for (Element includedSubElement : includedSubElements) {
            // importNode(deep=true) is required: the included elements belong to another Document
            Node copiedNode = parentElement.getOwnerDocument().importNode(includedSubElement, true);
            parentElement.appendChild(copiedNode);
        }
    }
}
/** Builds and registers one component per child element with the given tag name. */
private static void addConfiguredComponents(DeployState deployState, ContainerCluster<? extends Container> cluster,
                                            Element spec, String componentName) {
    for (Element componentElement : XML.getChildren(spec, componentName))
        cluster.addComponent(new DomComponentBuilder().build(deployState, cluster, componentElement));
}
/**
 * Like addConfiguredComponents, but runs the given validator on each element first;
 * the validator is expected to throw on invalid elements, aborting registration.
 */
private static void validateAndAddConfiguredComponents(DeployState deployState,
                                                       ContainerCluster<? extends Container> cluster,
                                                       Element spec, String componentName,
                                                       Consumer<Element> elementValidator) {
    for (Element componentElement : XML.getChildren(spec, componentName)) {
        elementValidator.accept(componentElement); // throws on invalid elements
        cluster.addComponent(new DomComponentBuilder().build(deployState, cluster, componentElement));
    }
}
// Wires the Athenz identity provider into the cluster when the deployment spec declares
// an Athenz domain. The Athenz service is resolved per instance/zone, falling back to the
// deployment-wide service; missing configuration is an error.
private void addIdentityProvider(ApplicationContainerCluster cluster,
                                 List<ConfigServerSpec> configServerSpecs,
                                 HostName loadBalancerName,
                                 URI ztsUrl,
                                 String athenzDnsSuffix,
                                 Zone zone,
                                 DeploymentSpec spec) {
    spec.athenzDomain()
        .ifPresent(domain -> {
            AthenzService service = spec.instance(app.getApplicationId().instance())
                    .flatMap(instanceSpec -> instanceSpec.athenzService(zone.environment(), zone.region()))
                    .or(() -> spec.athenzService())
                    .orElseThrow(() -> new RuntimeException("Missing Athenz service configuration in instance '" + app.getApplicationId().instance() + "'"));
            String zoneDnsSuffix = zone.environment().value() + "-" + zone.region().value() + "." + athenzDnsSuffix;
            IdentityProvider identityProvider = new IdentityProvider(domain, service, getLoadBalancerName(loadBalancerName, configServerSpecs), ztsUrl, zoneDnsSuffix, zone);
            cluster.addComponent(identityProvider);
            // Also expose the resolved identity on each container as service properties
            cluster.getContainers().forEach(container -> {
                container.setProp("identity.domain", domain.value());
                container.setProp("identity.service", service.value());
            });
        });
}
/**
 * Returns the given load balancer name, or falls back to the first config server's
 * host name ("unknown" when the list is empty).
 */
private HostName getLoadBalancerName(HostName loadbalancerName, List<ConfigServerSpec> configServerSpecs) {
    if (loadbalancerName != null) return loadbalancerName;
    String fallback = configServerSpecs.stream()
                                       .findFirst()
                                       .map(ConfigServerSpec::getHostName)
                                       .orElse("unknown");
    return HostName.from(fallback);
}
// True when the container element declares an embedded ZooKeeper (<zookeeper/> child).
private static boolean hasZooKeeper(Element spec) {
    return XML.getChild(spec, "zookeeper") != null;
}
/** Disallow renderers named "XmlRenderer" or "JsonRenderer" — those ids are reserved for the built-in renderers. */
private static void validateRendererElement(Element element) {
    String id = element.getAttribute("id");
    if (id.equals(xmlRendererId) || id.equals(jsonRendererId))
        throw new IllegalArgumentException(String.format("Renderer id %s is reserved for internal use", id));
}
// True for both the current and the deprecated name of the container cluster tag.
public static boolean isContainerTag(Element element) {
    return CONTAINER_TAG.equals(element.getTagName()) || DEPRECATED_CONTAINER_TAG.equals(element.getTagName());
}
} |
I think you can already use something along the lines of ```java assertThatThrownBy(() -> createTableSink(pkSchema, getKeyValueOptions()) .isInstanceOf(...) .hasMessageContaining() ``` | public void testPrimaryKeyValidation() {
final ResolvedSchema pkSchema =
new ResolvedSchema(
SCHEMA.getColumns(),
SCHEMA.getWatermarkSpecs(),
UniqueConstraint.primaryKey(NAME, Collections.singletonList(NAME)));
Map<String, String> sinkOptions =
getModifiedOptions(
getBasicSinkOptions(),
options ->
options.put(
String.format(
"%s.%s",
TestFormatFactory.IDENTIFIER,
TestFormatFactory.CHANGELOG_MODE.key()),
"I;UA;UB;D"));
createTableSink(pkSchema, sinkOptions);
try {
createTableSink(pkSchema, getBasicSinkOptions());
fail();
} catch (Throwable t) {
String error =
"The Kafka table 'default.default.t1' with 'test-format' format"
+ " doesn't support defining PRIMARY KEY constraint on the table, because it can't"
+ " guarantee the semantic of primary key.";
assertEquals(error, t.getCause().getMessage());
}
try {
createTableSink(pkSchema, getKeyValueOptions());
fail();
} catch (Throwable t) {
String error =
"The Kafka table 'default.default.t1' with 'test-format' format"
+ " doesn't support defining PRIMARY KEY constraint on the table, because it can't"
+ " guarantee the semantic of primary key.";
assertEquals(error, t.getCause().getMessage());
}
Map<String, String> sourceOptions =
getModifiedOptions(
getBasicSourceOptions(),
options ->
options.put(
String.format(
"%s.%s",
TestFormatFactory.IDENTIFIER,
TestFormatFactory.CHANGELOG_MODE.key()),
"I;UA;UB;D"));
createTableSource(pkSchema, sourceOptions);
try {
createTableSource(pkSchema, getBasicSourceOptions());
fail();
} catch (Throwable t) {
String error =
"The Kafka table 'default.default.t1' with 'test-format' format"
+ " doesn't support defining PRIMARY KEY constraint on the table, because it can't"
+ " guarantee the semantic of primary key.";
assertEquals(error, t.getCause().getMessage());
}
} | createTableSink(pkSchema, getKeyValueOptions()); | public void testPrimaryKeyValidation() {
final ResolvedSchema pkSchema =
new ResolvedSchema(
SCHEMA.getColumns(),
SCHEMA.getWatermarkSpecs(),
UniqueConstraint.primaryKey(NAME, Collections.singletonList(NAME)));
Map<String, String> sinkOptions =
getModifiedOptions(
getBasicSinkOptions(),
options ->
options.put(
String.format(
"%s.%s",
TestFormatFactory.IDENTIFIER,
TestFormatFactory.CHANGELOG_MODE.key()),
"I;UA;UB;D"));
createTableSink(pkSchema, sinkOptions);
assertThatExceptionOfType(ValidationException.class)
.isThrownBy(() -> createTableSink(pkSchema, getBasicSinkOptions()))
.havingRootCause()
.withMessage(
"The Kafka table 'default.default.t1' with 'test-format' format"
+ " doesn't support defining PRIMARY KEY constraint on the table, because it can't"
+ " guarantee the semantic of primary key.");
assertThatExceptionOfType(ValidationException.class)
.isThrownBy(() -> createTableSink(pkSchema, getKeyValueOptions()))
.havingRootCause()
.withMessage(
"The Kafka table 'default.default.t1' with 'test-format' format"
+ " doesn't support defining PRIMARY KEY constraint on the table, because it can't"
+ " guarantee the semantic of primary key.");
Map<String, String> sourceOptions =
getModifiedOptions(
getBasicSourceOptions(),
options ->
options.put(
String.format(
"%s.%s",
TestFormatFactory.IDENTIFIER,
TestFormatFactory.CHANGELOG_MODE.key()),
"I;UA;UB;D"));
createTableSource(pkSchema, sourceOptions);
assertThatExceptionOfType(ValidationException.class)
.isThrownBy(() -> createTableSource(pkSchema, getBasicSourceOptions()))
.havingRootCause()
.withMessage(
"The Kafka table 'default.default.t1' with 'test-format' format"
+ " doesn't support defining PRIMARY KEY constraint on the table, because it can't"
+ " guarantee the semantic of primary key.");
} | class 'abc'")));
final Map<String, String> modifiedOptions =
getModifiedOptions(
getBasicSinkOptions(), options -> options.put("sink.partitioner", "abc"));
createTableSink(SCHEMA, modifiedOptions);
}
@Test
public void testInvalidRoundRobinPartitionerWithKeyFields() {
thrown.expect(ValidationException.class);
thrown.expect(
containsCause(
new ValidationException(
"Currently 'round-robin' partitioner only works "
+ "when option 'key.fields' is not specified.")));
final Map<String, String> modifiedOptions =
getModifiedOptions(
getKeyValueOptions(),
options -> options.put("sink.partitioner", "round-robin"));
createTableSink(SCHEMA, modifiedOptions);
} | class 'abc'")));
final Map<String, String> modifiedOptions =
getModifiedOptions(
getBasicSinkOptions(), options -> options.put("sink.partitioner", "abc"));
createTableSink(SCHEMA, modifiedOptions);
}
@Test
public void testInvalidRoundRobinPartitionerWithKeyFields() {
thrown.expect(ValidationException.class);
thrown.expect(
containsCause(
new ValidationException(
"Currently 'round-robin' partitioner only works "
+ "when option 'key.fields' is not specified.")));
final Map<String, String> modifiedOptions =
getModifiedOptions(
getKeyValueOptions(),
options -> options.put("sink.partitioner", "round-robin"));
createTableSink(SCHEMA, modifiedOptions);
} |
Should we use https://github.com/ballerina-platform/ballerina-lang/blob/master/bvm/ballerina-runtime/src/main/java/org/ballerinalang/jvm/StringUtils.java#L112 instead? | public static Object next(Strand strand, ObjectValue m) {
IteratorValue xmlIterator = (IteratorValue) m.getNativeData("&iterator&");
if (xmlIterator == null) {
xmlIterator = ((XMLValue) m.get(new BmpStringValue("m"))).getIterator();
m.addNativeData("&iterator&", xmlIterator);
}
if (xmlIterator.hasNext()) {
Object xmlValue = xmlIterator.next();
Map<String, BField> fields = new HashMap<>();
BUnionType type = new BUnionType(Arrays.asList(BTypes.typeString, BTypes.typeXML));
fields.put("value", new BField(type, "value", Flags.PUBLIC + Flags.REQUIRED));
BRecordType recordType = new BRecordType("$$returnType$$", null, 0, fields,
null, true, 0);
return BallerinaValues.createRecord(new MapValueImpl<>(recordType), xmlValue);
}
return null;
} | xmlIterator = ((XMLValue) m.get(new BmpStringValue("m"))).getIterator(); | public static Object next(Strand strand, ObjectValue m) {
IteratorValue xmlIterator = (IteratorValue) m.getNativeData("&iterator&");
if (xmlIterator == null) {
xmlIterator = ((XMLValue) m.get("m")).getIterator();
m.addNativeData("&iterator&", xmlIterator);
}
if (xmlIterator.hasNext()) {
Object xmlValue = xmlIterator.next();
return BallerinaValues.createRecord(new MapValueImpl<>(BTypes.xmlItrNextReturnType), xmlValue);
}
return null;
} | class Next {
} | class Next {
} |
Document indexing doesn't always complete instantly, so I don't want to begin testing while it is still running nor do I want to spam the service with a ton of requests. | protected Mono<Void> populateIndex(int documentCount, String documentSize) {
/*
* Generate the count of documents using the given size. Then, upload the documents in batches of 100, this
* prevents the batch from triggering the services request size limit to fail. Finally, continuously poll the
* index for its document count until it is equal to the count passed.
*/
return Mono.defer(() -> {
List<Hotel> hotels = DocumentGenerator.generateHotels(documentCount, DocumentSize.valueOf(documentSize));
return Flux.range(0, (int) Math.ceil(hotels.size() / 100D))
.map(i -> hotels.subList(i * 100, Math.min((i + 1) * 100, hotels.size())))
.flatMap(hotelDocuments -> searchAsyncClient.indexDocuments(new IndexDocumentsBatch<Hotel>()
.addUploadActions(hotelDocuments)))
.then();
}).then(Mono.defer(() -> searchAsyncClient.getDocumentCount()
.delaySubscription(Duration.ofSeconds(1))
.filter(count -> count == documentCount)
.repeatWhenEmpty(Flux::repeat)
.then()));
} | .delaySubscription(Duration.ofSeconds(1)) | protected Mono<Void> populateIndex(int documentCount, String documentSize) {
/*
* Generate the count of documents using the given size. Then, upload the documents in batches of 100, this
* prevents the batch from triggering the services request size limit to fail. Finally, continuously poll the
* index for its document count until it is equal to the count passed.
*/
return Mono.defer(() -> {
List<Hotel> hotels = DocumentGenerator.generateHotels(documentCount, DocumentSize.valueOf(documentSize));
return Flux.range(0, (int) Math.ceil(hotels.size() / 100D))
.map(i -> hotels.subList(i * 100, Math.min((i + 1) * 100, hotels.size())))
.flatMap(hotelDocuments -> searchAsyncClient.indexDocuments(new IndexDocumentsBatch<Hotel>()
.addUploadActions(hotelDocuments)))
.then();
}).then(Mono.defer(() -> searchAsyncClient.getDocumentCount()
.delaySubscription(Duration.ofSeconds(1))
.filter(count -> count == documentCount)
.repeatWhenEmpty(Flux::repeat)
.then()));
} | class ServiceTest<TOptions extends PerfStressOptions> extends PerfStressTest<TOptions> {
private static final String CONFIGURATION_ERROR = "Configuration %s must be set in either environment variables "
+ "or system properties.%n";
private static final String ALLOWED_INDEX_CHARACTERS = "abcdefghijklmnopqrstuvwxyz0123456789";
private static final int INDEX_NAME_LENGTH = 24;
protected static final String SUGGESTER_NAME = "sg";
protected final SearchClient searchClient;
protected final SearchAsyncClient searchAsyncClient;
private final SearchIndexAsyncClient searchIndexAsyncClient;
private final String indexName;
/**
 * Reads the search endpoint and API key from global configuration (exiting the process
 * when either is missing), builds the index client, and generates a random
 * INDEX_NAME_LENGTH-character index name so concurrent test runs do not collide.
 */
public ServiceTest(TOptions options) {
    super(options);
    String searchEndpoint = Configuration.getGlobalConfiguration().get("SEARCH_ENDPOINT");
    if (CoreUtils.isNullOrEmpty(searchEndpoint)) {
        System.out.printf(CONFIGURATION_ERROR, "SEARCH_ENDPOINT");
        System.exit(1);
    }
    String searchApiKey = Configuration.getGlobalConfiguration().get("SEARCH_API_KEY");
    if (CoreUtils.isNullOrEmpty(searchApiKey)) {
        System.out.printf(CONFIGURATION_ERROR, "SEARCH_API_KEY");
        System.exit(1);
    }
    SearchIndexClientBuilder builder = new SearchIndexClientBuilder()
        .endpoint(searchEndpoint)
        .credential(new AzureKeyCredential(searchApiKey))
        .httpClient(new NettyAsyncHttpClientBuilder().build());
    this.searchIndexAsyncClient = builder.buildAsyncClient();
    // Random lowercase-alphanumeric index name, unique per test run
    Random random = new Random();
    StringBuilder stringBuilder = new StringBuilder();
    for (int i = 0; i < INDEX_NAME_LENGTH; i++) {
        stringBuilder.append(ALLOWED_INDEX_CHARACTERS.charAt(random.nextInt(ALLOWED_INDEX_CHARACTERS.length())));
    }
    this.indexName = stringBuilder.toString();
    this.searchClient = builder.buildClient().getSearchClient(this.indexName);
    this.searchAsyncClient = this.searchIndexAsyncClient.getSearchAsyncClient(this.indexName);
}
// Creates the per-run test index with fields derived from Hotel and a suggester over
// Description and HotelName.
@Override
public Mono<Void> globalSetupAsync() {
    return searchIndexAsyncClient
        .createIndex(new SearchIndex(indexName, SearchIndexAsyncClient.buildSearchFields(Hotel.class, null))
            .setSuggesters(new SearchSuggester(SUGGESTER_NAME, Arrays.asList("Description", "HotelName"))))
        .then();
}
// Deletes the per-run index created in globalSetupAsync.
@Override
public Mono<Void> globalCleanupAsync() {
    return searchIndexAsyncClient.deleteIndex(indexName);
}
} | class ServiceTest<TOptions extends PerfStressOptions> extends PerfStressTest<TOptions> {
private static final String CONFIGURATION_ERROR = "Configuration %s must be set in either environment variables "
+ "or system properties.%n";
private static final String ALLOWED_INDEX_CHARACTERS = "abcdefghijklmnopqrstuvwxyz0123456789";
private static final int INDEX_NAME_LENGTH = 24;
protected static final String SUGGESTER_NAME = "sg";
protected final SearchClient searchClient;
protected final SearchAsyncClient searchAsyncClient;
private final SearchIndexAsyncClient searchIndexAsyncClient;
private final String indexName;
/**
 * Reads the search endpoint and API key from global configuration (exiting the process
 * when either is missing), builds the index client, and generates a random
 * INDEX_NAME_LENGTH-character index name so concurrent test runs do not collide.
 */
public ServiceTest(TOptions options) {
    super(options);
    String searchEndpoint = Configuration.getGlobalConfiguration().get("SEARCH_ENDPOINT");
    if (CoreUtils.isNullOrEmpty(searchEndpoint)) {
        System.out.printf(CONFIGURATION_ERROR, "SEARCH_ENDPOINT");
        System.exit(1);
    }
    String searchApiKey = Configuration.getGlobalConfiguration().get("SEARCH_API_KEY");
    if (CoreUtils.isNullOrEmpty(searchApiKey)) {
        System.out.printf(CONFIGURATION_ERROR, "SEARCH_API_KEY");
        System.exit(1);
    }
    SearchIndexClientBuilder builder = new SearchIndexClientBuilder()
        .endpoint(searchEndpoint)
        .credential(new AzureKeyCredential(searchApiKey))
        .httpClient(new NettyAsyncHttpClientBuilder()
            // NOTE(review): hardcoded localhost:8888 HTTP proxy looks like a local
            // debugging (e.g. Fiddler) leftover — confirm it should ship.
            .proxy(new ProxyOptions(ProxyOptions.Type.HTTP, new InetSocketAddress("localhost", 8888)))
            .build());
    this.searchIndexAsyncClient = builder.buildAsyncClient();
    // Random lowercase-alphanumeric index name built from an IntStream of indices
    this.indexName = new Random().ints(INDEX_NAME_LENGTH, 0, ALLOWED_INDEX_CHARACTERS.length())
        .mapToObj(ALLOWED_INDEX_CHARACTERS::charAt)
        .collect(StringBuilder::new, StringBuilder::append, StringBuilder::append)
        .toString();
    this.searchClient = builder.buildClient().getSearchClient(this.indexName);
    this.searchAsyncClient = this.searchIndexAsyncClient.getSearchAsyncClient(this.indexName);
}
// Creates the per-run test index with fields derived from Hotel and a suggester over
// Description and HotelName.
@Override
public Mono<Void> globalSetupAsync() {
    return searchIndexAsyncClient
        .createIndex(new SearchIndex(indexName, SearchIndexAsyncClient.buildSearchFields(Hotel.class, null))
            .setSuggesters(new SearchSuggester(SUGGESTER_NAME, Arrays.asList("Description", "HotelName"))))
        .then();
}
// Deletes the per-run index created in globalSetupAsync.
@Override
public Mono<Void> globalCleanupAsync() {
    return searchIndexAsyncClient.deleteIndex(indexName);
}
} |
Try to leave one comment and resolve the rest | public static DataChangeDetectionPolicy map(com.azure.search.documents.implementation.models.DataChangeDetectionPolicy obj) {
if (obj instanceof HighWaterMarkChangeDetectionPolicy) {
return HighWaterMarkChangeDetectionPolicyConverter.map((HighWaterMarkChangeDetectionPolicy) obj);
}
if (obj instanceof SqlIntegratedChangeTrackingPolicy) {
return SqlIntegratedChangeTrackingPolicyConverter.map((SqlIntegratedChangeTrackingPolicy) obj);
}
throw LOGGER.logExceptionAsError(new RuntimeException(String.format(ABSTRACT_EXTERNAL_ERROR_MSG,
obj.getClass().getSimpleName())));
} | if (obj instanceof HighWaterMarkChangeDetectionPolicy) { | public static DataChangeDetectionPolicy map(com.azure.search.documents.implementation.models.DataChangeDetectionPolicy obj) {
if (obj instanceof HighWaterMarkChangeDetectionPolicy) {
return HighWaterMarkChangeDetectionPolicyConverter.map((HighWaterMarkChangeDetectionPolicy) obj);
}
if (obj instanceof SqlIntegratedChangeTrackingPolicy) {
return SqlIntegratedChangeTrackingPolicyConverter.map((SqlIntegratedChangeTrackingPolicy) obj);
}
throw LOGGER.logExceptionAsError(new RuntimeException(String.format(ABSTRACT_EXTERNAL_ERROR_MSG,
obj.getClass().getSimpleName())));
} | class converter.
*/ | class converter.
*/ |
It is a long timeout, but not too long — balancing the load success rate against cluster overload. | public void fillByteBuffer(ByteBuffer buffer) throws IOException, InterruptedException {
if (closed) {
throw new IOException("Stream is already closed.");
}
ByteArrayInputStream inputStream = new ByteArrayInputStream(buffer.array(), buffer.position(), buffer.limit());
queue.offer(inputStream, 300, TimeUnit.SECONDS);
} | queue.offer(inputStream, 300, TimeUnit.SECONDS); | public void fillByteBuffer(ByteBuffer buffer) throws IOException, InterruptedException {
if (closed) {
throw new IOException("Stream is already closed.");
}
ByteArrayInputStream inputStream = new ByteArrayInputStream(buffer.array(), buffer.position(), buffer.limit());
queue.offer(inputStream, 300, TimeUnit.SECONDS);
} | class ByteBufferNetworkInputStream extends InputStream {
private ArrayBlockingQueue<ByteArrayInputStream> queue;
private ByteArrayInputStream currentInputStream;
private volatile boolean finished = false;
private volatile boolean closed = false;
// Defaults to a queue capacity of 32 buffered chunks.
public ByteBufferNetworkInputStream() {
    this(32);
}
// Bounded queue gives back-pressure between the producer and the reader.
public ByteBufferNetworkInputStream(int capacity) {
    this.queue = new ArrayBlockingQueue<>(capacity);
}
// Producer signals that no more data will be offered; readers drain the queue and then see EOF.
public void markFinished() {
    this.finished = true;
}
private ByteArrayInputStream getNextByteArrayStream() throws IOException {
if (currentInputStream == null || currentInputStream.available() == 0) {
while (!finished || !queue.isEmpty()) {
try {
currentInputStream = queue.poll(1, TimeUnit.SECONDS);
if (currentInputStream != null) {
return currentInputStream;
}
} catch (InterruptedException e) {
throw new IOException("Failed to get next stream");
}
}
return null;
}
return currentInputStream;
}
/** Reads one byte from the current chunk, or returns -1 at end-of-stream. */
@Override
public int read() throws IOException {
    ByteArrayInputStream stream = getNextByteArrayStream();
    return stream == null ? -1 : stream.read();
}
/** Reads up to len bytes from the current chunk into b, or returns -1 at end-of-stream. */
@Override
public int read(byte[] b, int off, int len) throws IOException {
    ByteArrayInputStream stream = getNextByteArrayStream();
    return stream == null ? -1 : stream.read(b, off, len);
}
// Delegates to the ranged read over the whole destination array.
public int read(byte[] b) throws IOException {
    return read(b, 0, b.length);
}
// Marks the stream closed and drains/closes any queued chunks.
// NOTE(review): getNextByteArrayStream() keeps polling until 'finished' is set or data
// arrives, so close() may block indefinitely if the producer never calls markFinished()
// — confirm this is the intended contract.
public void close() throws IOException {
    closed = true;
    ByteArrayInputStream stream = getNextByteArrayStream();
    if (stream == null) {
        return;
    }
    stream.close();
    // Drain anything the producer already queued (ByteArrayInputStream.close is a no-op,
    // but this empties the queue)
    while (!queue.isEmpty()) {
        queue.poll().close();
    }
}
} | class ByteBufferNetworkInputStream extends InputStream {
private ArrayBlockingQueue<ByteArrayInputStream> queue;
private ByteArrayInputStream currentInputStream;
private volatile boolean finished = false;
private volatile boolean closed = false;
// Defaults to a queue capacity of 32 buffered chunks.
public ByteBufferNetworkInputStream() {
    this(32);
}
// Bounded queue gives back-pressure between the producer and the reader.
public ByteBufferNetworkInputStream(int capacity) {
    this.queue = new ArrayBlockingQueue<>(capacity);
}
// Producer signals that no more data will be offered; readers drain the queue and then see EOF.
public void markFinished() {
    this.finished = true;
}
private ByteArrayInputStream getNextByteArrayStream() throws IOException {
if (currentInputStream == null || currentInputStream.available() == 0) {
while (!finished || !queue.isEmpty()) {
try {
currentInputStream = queue.poll(1, TimeUnit.SECONDS);
if (currentInputStream != null) {
return currentInputStream;
}
} catch (InterruptedException e) {
throw new IOException("Failed to get next stream");
}
}
return null;
}
return currentInputStream;
}
// Reads one byte from the current chunk; -1 at end-of-stream.
@Override
public int read() throws IOException {
    ByteArrayInputStream stream = getNextByteArrayStream();
    if (stream == null) {
        return -1;
    }
    return stream.read();
}
// Reads up to len bytes from the current chunk into b; -1 at end-of-stream.
public int read(byte[] b, int off, int len) throws IOException {
    ByteArrayInputStream stream = getNextByteArrayStream();
    if (stream == null) {
        return -1;
    }
    return stream.read(b, off, len);
}
// Delegates to the ranged read over the whole destination array.
public int read(byte[] b) throws IOException {
    return read(b, 0, b.length);
}
// Marks the stream closed and drains/closes any queued chunks.
// NOTE(review): getNextByteArrayStream() keeps polling until 'finished' is set or data
// arrives, so close() may block indefinitely if the producer never calls markFinished()
// — confirm this is the intended contract.
public void close() throws IOException {
    closed = true;
    ByteArrayInputStream stream = getNextByteArrayStream();
    if (stream == null) {
        return;
    }
    stream.close();
    // Drain anything the producer already queued (ByteArrayInputStream.close is a no-op,
    // but this empties the queue)
    while (!queue.isEmpty()) {
        queue.poll().close();
    }
}
} |
Hello @gastaldi, do you mean it's broken because it isn't doing a check to see if the `file` is a directory, or something else? | public InputStream getResourceAsStream(String unsanitisedName) {
String name = sanitizeName(unsanitisedName);
ClassLoaderState state = getState();
if (state.bannedResources.contains(name)) {
return null;
}
if (name.endsWith(".class")) {
ClassPathElement[] providers = state.loadableResources.get(name);
if (providers != null) {
return new ByteArrayInputStream(providers[0].getResource(name).getData());
}
} else {
for (ClassPathElement i : elements) {
ClassPathResource res = i.getResource(name);
if (res != null) {
if (res.isDirectory()) {
try {
return res.getUrl().openStream();
} catch (IOException e) {
log.debug("Ignoring exception that occurred while opening a stream for resource " + unsanitisedName,
e);
continue;
}
}
return new ByteArrayInputStream(res.getData());
}
}
}
return parent.getResourceAsStream(unsanitisedName);
} | return res.getUrl().openStream(); | public InputStream getResourceAsStream(String unsanitisedName) {
String name = sanitizeName(unsanitisedName);
ClassLoaderState state = getState();
if (state.bannedResources.contains(name)) {
return null;
}
if (name.endsWith(".class")) {
ClassPathElement[] providers = state.loadableResources.get(name);
if (providers != null) {
return new ByteArrayInputStream(providers[0].getResource(name).getData());
}
} else {
for (ClassPathElement i : elements) {
ClassPathResource res = i.getResource(name);
if (res != null) {
if (res.isDirectory()) {
try {
return res.getUrl().openStream();
} catch (IOException e) {
log.debug("Ignoring exception that occurred while opening a stream for resource " + unsanitisedName,
e);
continue;
}
}
return new ByteArrayInputStream(res.getData());
}
}
}
return parent.getResourceAsStream(unsanitisedName);
} | class files though, adding them causes a restart
if (name.endsWith(".class") && !endsWithTrailingSlash) {
ClassPathElement[] providers = state.loadableResources.get(name);
if (providers != null) {
return providers[0].getResource(name).getUrl();
}
} | class files though, adding them causes a restart
if (name.endsWith(".class") && !endsWithTrailingSlash) {
ClassPathElement[] providers = state.loadableResources.get(name);
if (providers != null) {
return providers[0].getResource(name).getUrl();
}
} |
Yes, that is true. `CosmosConfig` is not directly related to the database name. In fact, I was also wondering, if we can completely get rid of `database` from `CosmosConfig` and customers can directly override the method `getDatabaseName()` from `CosmosConfigurationSupport` class. That's what spring-data-mongodb does. They don't have any config tied to the database. They only have `getDatabaseName()` in `MongoConfigurationSupport` class. However, it will not hurt to keep it in `CosmosConfig`, because of easiness of use. Having `database` in `CosmosConfig` provides the user ability to use the database from the `CosmosConfig` bean. What are your thoughts on this @zhoufenqin @saragluna | public CosmosConfig getCosmosConfig() {
return CosmosConfig.builder()
.enableQueryMetrics(true)
.responseDiagnosticsProcessor(new ResponseDiagnosticsProcessorImplementation())
.database(secondaryProperties.getDatabase())
.build();
} | .build(); | public CosmosConfig getCosmosConfig() {
return CosmosConfig.builder()
.enableQueryMetrics(true)
.responseDiagnosticsProcessor(new ResponseDiagnosticsProcessorImplementation())
.build();
} | class PrimaryDataSourceConfiguration2 {
@Bean
public ReactiveCosmosTemplate primaryReactiveCosmosTemplate(CosmosAsyncClient cosmosAsyncClient, CosmosConfig cosmosConfig, MappingCosmosConverter mappingCosmosConverter) {
return new ReactiveCosmosTemplate(cosmosAsyncClient, "test1_2", cosmosConfig, mappingCosmosConverter, cosmosAuditingHandler);
}
} | class PrimaryDataSourceConfiguration2 {
@Bean
public ReactiveCosmosTemplate primaryReactiveCosmosTemplate(CosmosAsyncClient cosmosAsyncClient, CosmosConfig cosmosConfig, MappingCosmosConverter mappingCosmosConverter) {
return new ReactiveCosmosTemplate(cosmosAsyncClient, "test1_2", cosmosConfig, mappingCosmosConverter, cosmosAuditingHandler);
}
} |
Should have one with more instances, but I guess we need instance orchestration first :) | public void multiple_endpoints() {
tester.computeVersionStatus();
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.instances("instance1")
.region("us-west-1")
.region("us-east-3")
.region("eu-west-1")
.endpoint("eu", "default", "eu-west-1")
.endpoint("default", "default", "us-west-1", "us-east-3")
.build();
ApplicationId id = createTenantAndApplication();
long projectId = 1;
MultiPartStreamer deployData = createApplicationDeployData(Optional.empty(), false);
startAndTestChange(controllerTester, id, projectId, applicationPackage, deployData, 100);
for (var job : List.of(JobType.productionUsWest1, JobType.productionUsEast3, JobType.productionEuWest1)) {
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/" + job.zone(SystemName.main).region().value() + "/deploy", POST)
.data(deployData)
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
controllerTester.jobCompletion(job)
.application(id)
.projectId(projectId)
.submit();
}
setZoneInRotation("rotation-fqdn-2", ZoneId.from("prod", "us-west-1"));
setZoneInRotation("rotation-fqdn-2", ZoneId.from("prod", "us-east-3"));
setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "eu-west-1"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation", GET)
.userIdentity(USER_ID),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"application 'tenant1.application1.instance1' has multiple rotations. Query parameter 'endpointId' must be given\"}",
400);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation?endpointId=default", GET)
.userIdentity(USER_ID),
"{\"bcpStatus\":{\"rotationStatus\":\"IN\"}}",
200);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation?endpointId=eu", GET)
.userIdentity(USER_ID),
"{\"bcpStatus\":{\"rotationStatus\":\"UNKNOWN\"}}",
200);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/eu-west-1/global-rotation?endpointId=eu", GET)
.userIdentity(USER_ID),
"{\"bcpStatus\":{\"rotationStatus\":\"IN\"}}",
200);
} | .instances("instance1") | public void multiple_endpoints() {
tester.computeVersionStatus();
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.instances("instance1")
.region("us-west-1")
.region("us-east-3")
.region("eu-west-1")
.endpoint("eu", "default", "eu-west-1")
.endpoint("default", "default", "us-west-1", "us-east-3")
.build();
ApplicationId id = createTenantAndApplication();
long projectId = 1;
MultiPartStreamer deployData = createApplicationDeployData(Optional.empty(), false);
startAndTestChange(controllerTester, id, projectId, applicationPackage, deployData, 100);
for (var job : List.of(JobType.productionUsWest1, JobType.productionUsEast3, JobType.productionEuWest1)) {
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/" + job.zone(SystemName.main).region().value() + "/deploy", POST)
.data(deployData)
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
controllerTester.jobCompletion(job)
.application(id)
.projectId(projectId)
.submit();
}
setZoneInRotation("rotation-fqdn-2", ZoneId.from("prod", "us-west-1"));
setZoneInRotation("rotation-fqdn-2", ZoneId.from("prod", "us-east-3"));
setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "eu-west-1"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation", GET)
.userIdentity(USER_ID),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"application 'tenant1.application1.instance1' has multiple rotations. Query parameter 'endpointId' must be given\"}",
400);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation?endpointId=default", GET)
.userIdentity(USER_ID),
"{\"bcpStatus\":{\"rotationStatus\":\"IN\"}}",
200);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation?endpointId=eu", GET)
.userIdentity(USER_ID),
"{\"bcpStatus\":{\"rotationStatus\":\"UNKNOWN\"}}",
200);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/eu-west-1/global-rotation?endpointId=eu", GET)
.userIdentity(USER_ID),
"{\"bcpStatus\":{\"rotationStatus\":\"IN\"}}",
200);
} | class ApplicationApiTest extends ControllerContainerTest {
private static final String responseFiles = "src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/";
private static final ApplicationPackage applicationPackageDefault = new ApplicationPackageBuilder()
.instances("default")
.environment(Environment.prod)
.globalServiceId("foo")
.region("us-central-1")
.region("us-east-3")
.region("us-west-1")
.blockChange(false, true, "mon-fri", "0-8", "UTC")
.build();
private static final ApplicationPackage applicationPackageInstance1 = new ApplicationPackageBuilder()
.instances("instance1")
.environment(Environment.prod)
.globalServiceId("foo")
.region("us-central-1")
.region("us-east-3")
.region("us-west-1")
.blockChange(false, true, "mon-fri", "0-8", "UTC")
.build();
private static final AthenzDomain ATHENZ_TENANT_DOMAIN = new AthenzDomain("domain1");
private static final AthenzDomain ATHENZ_TENANT_DOMAIN_2 = new AthenzDomain("domain2");
private static final ScrewdriverId SCREWDRIVER_ID = new ScrewdriverId("12345");
private static final UserId USER_ID = new UserId("myuser");
private static final UserId HOSTED_VESPA_OPERATOR = new UserId("johnoperator");
private static final OktaAccessToken OKTA_AT = new OktaAccessToken("dummy");
private static final ZoneId TEST_ZONE = ZoneId.from(Environment.test, RegionName.from("us-east-1"));
private static final ZoneId STAGING_ZONE = ZoneId.from(Environment.staging, RegionName.from("us-east-3"));
private ContainerControllerTester controllerTester;
private ContainerTester tester;
@Before
public void before() {
controllerTester = new ContainerControllerTester(container, responseFiles);
tester = controllerTester.containerTester();
}
@Test
public void testApplicationApi() {
tester.computeVersionStatus();
tester.controller().jobController().setRunner(__ -> { });
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
tester.assertResponse(request("/application/v4/", GET).userIdentity(USER_ID),
new File("root.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
.userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT),
new File("tenant-without-applications.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1", PUT)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
new File("tenant-without-applications.json"));
tester.assertResponse(request("/application/v4/user", GET).userIdentity(USER_ID),
new File("user.json"));
tester.assertResponse(request("/application/v4/user", PUT).userIdentity(USER_ID),
"{\"message\":\"Created user 'by-myuser'\"}");
tester.assertResponse(request("/application/v4/user", GET).userIdentity(USER_ID),
new File("user-which-exists.json"));
tester.assertResponse(request("/application/v4/tenant/by-myuser", DELETE).userIdentity(USER_ID),
"{\"tenant\":\"by-myuser\",\"type\":\"USER\",\"applications\":[]}");
tester.assertResponse(request("/application/v4/tenant/", GET).userIdentity(USER_ID),
new File("tenant-list.json"));
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN_2, USER_ID);
registerContact(1234);
tester.assertResponse(request("/application/v4/tenant/tenant2", POST)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT)
.data("{\"athensDomain\":\"domain2\", \"property\":\"property2\", \"propertyId\":\"1234\"}"),
new File("tenant-without-applications-with-id.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2", PUT)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT)
.data("{\"athensDomain\":\"domain2\", \"property\":\"property2\", \"propertyId\":\"1234\"}"),
new File("tenant-without-applications-with-id.json"));
updateContactInformation();
tester.assertResponse(request("/application/v4/tenant/tenant2", GET).userIdentity(USER_ID),
new File("tenant-with-contact-info.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("application-reference.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1", GET).userIdentity(USER_ID),
new File("tenant-with-application.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/", GET).userIdentity(USER_ID),
new File("application-list.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/", GET).userIdentity(USER_ID),
new File("application-list.json"));
addUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));
MultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
.data(entity)
.header("X-Content-Hash", Base64.getEncoder().encodeToString(Signatures.sha256Digest(entity::data)))
.userIdentity(USER_ID),
new File("deploy-result.json"));
ApplicationId id = ApplicationId.from("tenant1", "application1", "instance1");
long screwdriverProjectId = 123;
addScrewdriverUserToDeployRole(SCREWDRIVER_ID,
ATHENZ_TENANT_DOMAIN,
new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId(id.application().value()));
controllerTester.jobCompletion(JobType.component)
.application(id)
.projectId(screwdriverProjectId)
.uploadArtifact(applicationPackageInstance1)
.submit();
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/instance1/", POST)
.data(createApplicationDeployData(Optional.empty(), false))
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/instance1", DELETE)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in test.us-east-1\"}");
controllerTester.jobCompletion(JobType.systemTest)
.application(id)
.projectId(screwdriverProjectId)
.submit();
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/staging/region/us-east-3/instance/instance1/", POST)
.data(createApplicationDeployData(Optional.empty(), false))
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/staging/region/us-east-3/instance/instance1", DELETE)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in staging.us-east-3\"}");
controllerTester.jobCompletion(JobType.stagingTest)
.application(id)
.projectId(screwdriverProjectId)
.submit();
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/", POST)
.data(createApplicationDeployData(Optional.empty(), false))
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
controllerTester.jobCompletion(JobType.productionUsCentral1)
.application(id)
.projectId(screwdriverProjectId)
.unsuccessful()
.submit();
entity = createApplicationDeployData(Optional.empty(),
Optional.of(ApplicationVersion.from(BuildJob.defaultSourceRevision,
BuildJob.defaultBuildNumber - 1)),
true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/", POST)
.data(entity)
.userIdentity(HOSTED_VESPA_OPERATOR),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"No application package found for tenant1.application1.instance1 with version 1.0.41-commit1\"}",
400);
entity = createApplicationDeployData(Optional.empty(),
Optional.of(ApplicationVersion.from(BuildJob.defaultSourceRevision,
BuildJob.defaultBuildNumber)),
true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/", POST)
.data(entity)
.userIdentity(HOSTED_VESPA_OPERATOR),
new File("deploy-result.json"));
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.instances("instance1")
.environment(Environment.prod)
.region("us-west-1")
.build();
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", POST)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("application-reference-2.json"));
ApplicationId app2 = ApplicationId.from("tenant2", "application2", "default");
long screwdriverProjectId2 = 456;
addScrewdriverUserToDeployRole(SCREWDRIVER_ID,
ATHENZ_TENANT_DOMAIN_2,
new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId(app2.application().value()));
controllerTester.controller().applications().deploymentTrigger().triggerChange(TenantAndApplicationId.from(app2), Change.of(Version.fromString("7.0")));
controllerTester.jobCompletion(JobType.component)
.application(app2)
.projectId(screwdriverProjectId2)
.uploadArtifact(applicationPackage)
.submit();
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", GET)
.userIdentity(USER_ID),
new File("application2.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", GET)
.screwdriverIdentity(SCREWDRIVER_ID),
new File("application2.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", PATCH)
.userIdentity(USER_ID)
.data("{\"majorVersion\":7}"),
"{\"message\":\"Set major version to 7\"}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2/key", POST)
.userIdentity(USER_ID)
.data("{\"key\":\"-----BEGIN PUBLIC KEY-----\n∠( ᐛ 」∠)_\n-----END PUBLIC KEY-----\"}"),
"{\"message\":\"Added deploy key -----BEGIN PUBLIC KEY-----\\n∠( ᐛ 」∠)_\\n-----END PUBLIC KEY-----\"}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2/instance/default", PATCH)
.userIdentity(USER_ID)
.data("{\"pemDeployKey\":\"-----BEGIN PUBLIC KEY-----\n∠( ᐛ 」∠)_\n-----END PUBLIC KEY-----\"}"),
"{\"message\":\"Added deploy key -----BEGIN PUBLIC KEY-----\\n∠( ᐛ 」∠)_\\n-----END PUBLIC KEY-----\"}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", GET)
.userIdentity(USER_ID),
new File("application2-with-patches.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", PATCH)
.userIdentity(USER_ID)
.data("{\"majorVersion\":null}"),
"{\"message\":\"Set major version to empty\"}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2/key", DELETE)
.userIdentity(USER_ID)
.data("{\"key\":\"-----BEGIN PUBLIC KEY-----\\n∠( ᐛ 」∠)_\\n-----END PUBLIC KEY-----\"}"),
"{\"message\":\"Removed deploy key -----BEGIN PUBLIC KEY-----\\n∠( ᐛ 」∠)_\\n-----END PUBLIC KEY-----\"}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", GET)
.userIdentity(USER_ID),
new File("application2.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", DELETE)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
"{\"message\":\"Deleted application tenant2.application2\"}");
controllerTester.upgrader().overrideConfidence(Version.fromString("6.1"), VespaVersion.Confidence.broken);
tester.computeVersionStatus();
setDeploymentMaintainedInfo(controllerTester);
setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "us-central-1"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", GET)
.userIdentity(USER_ID),
new File("application.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1", GET)
.userIdentity(USER_ID),
new File("deployment.json"));
addIssues(controllerTester, TenantAndApplicationId.from("tenant1", "application1"));
tester.assertResponse(request("/application/v4/", GET)
.userIdentity(USER_ID)
.recursive("deployment"),
new File("recursive-root.json"));
tester.assertResponse(request("/application/v4/", GET)
.userIdentity(USER_ID)
.recursive("tenant"),
new File("recursive-until-tenant-root.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/", GET)
.userIdentity(USER_ID)
.recursive("true"),
new File("tenant1-recursive.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", GET)
.userIdentity(USER_ID)
.recursive("true"),
new File("application1-recursive.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/nodes", GET)
.userIdentity(USER_ID),
new File("application-nodes.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application1/environment/dev/region/us-central-1/instance/default/logs?from=1233&to=3214", GET)
.userIdentity(USER_ID),
"INFO - All good");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", DELETE)
.userIdentity(HOSTED_VESPA_OPERATOR),
"{\"message\":\"Changed deployment from 'application change to 1.0.42-commit1' to 'no change' for application 'tenant1.application1'\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", DELETE)
.userIdentity(USER_ID)
.data("{\"cancel\":\"all\"}"),
"{\"message\":\"No deployment in progress for application 'tenant1.application1' at this time\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", POST)
.userIdentity(USER_ID)
.data("6.1.0"),
"{\"message\":\"Triggered pin to 6.1 for tenant1.application1\"}");
assertTrue("Action is logged to audit log",
tester.controller().auditLogger().readLog().entries().stream()
.anyMatch(entry -> entry.resource().equals("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin")));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{\"platform\":\"6.1\",\"pinned\":true}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", GET)
.userIdentity(USER_ID), "{\"platform\":\"6.1\",\"pinned\":true}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Changed deployment from 'pin to 6.1' to 'upgrade to 6.1' for application 'tenant1.application1'\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{\"platform\":\"6.1\",\"pinned\":false}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", POST)
.userIdentity(USER_ID)
.data("6.1"),
"{\"message\":\"Triggered pin to 6.1 for tenant1.application1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{\"platform\":\"6.1\",\"pinned\":true}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/platform", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Changed deployment from 'pin to 6.1' to 'pin to current platform' for application 'tenant1.application1'\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{\"pinned\":true}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Changed deployment from 'pin to current platform' to 'no change' for application 'tenant1.application1'\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/production-us-west-1/pause", POST)
.userIdentity(USER_ID),
"{\"message\":\"production-us-west-1 for tenant1.application1.instance1 paused for " + DeploymentTrigger.maxPause + "\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/production-us-west-1", POST)
.userIdentity(USER_ID),
"{\"message\":\"Triggered production-us-west-1 for tenant1.application1.instance1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart", POST)
.userIdentity(USER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart", POST)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/staging/region/us-central-1/instance/instance1/restart", POST)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in staging.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-central-1/instance/instance1/restart", POST)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in test.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-central-1/instance/instance1/restart", POST)
.userIdentity(USER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in dev.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart?hostname=host1", POST)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"error-code\":\"INTERNAL_SERVER_ERROR\",\"message\":\"No node with the hostname host1 is known.\"}", 500);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/suspended", GET)
.userIdentity(USER_ID),
new File("suspended.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/service", GET)
.userIdentity(USER_ID),
new File("services.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/service/storagenode-awe3slno6mmq2fye191y324jl/state/v1/", GET)
.userIdentity(USER_ID),
new File("service.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", DELETE)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("delete-with-active-deployments.json"), 400);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in dev.us-west-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1", DELETE)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in prod.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1", DELETE)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in prod.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploy/dev-us-east-1", POST)
.userIdentity(USER_ID)
.data(createApplicationDeployData(applicationPackage, false)),
new File("deployment-job-accepted.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(createApplicationSubmissionData(applicationPackage)),
"{\"message\":\"Application package version: 1.0.43-d00d, source revision of repository 'repo', branch 'master' with commit 'd00d', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\"}");
ApplicationPackage packageWithServiceForWrongDomain = new ApplicationPackageBuilder()
.instances("instance1")
.environment(Environment.prod)
.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(ATHENZ_TENANT_DOMAIN_2.getName()), AthenzService.from("service"))
.region("us-west-1")
.build();
configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN_2, "service"), true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(createApplicationSubmissionData(packageWithServiceForWrongDomain)),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Athenz domain in deployment.xml: [domain2] must match tenant domain: [domain1]\"}", 400);
ApplicationPackage packageWithService = new ApplicationPackageBuilder()
.instances("instance1")
.environment(Environment.prod)
.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(ATHENZ_TENANT_DOMAIN.getName()), AthenzService.from("service"))
.region("us-west-1")
.build();
configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(createApplicationSubmissionData(packageWithService)),
"{\"message\":\"Application package version: 1.0.44-d00d, source revision of repository 'repo', branch 'master' with commit 'd00d', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.header("X-Content-Hash", "not/the/right/hash")
.data(createApplicationSubmissionData(packageWithService)),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Value of X-Content-Hash header does not match computed content hash\"}", 400);
MultiPartStreamer streamer = createApplicationSubmissionData(packageWithService);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.header("X-Content-Hash", Base64.getEncoder().encodeToString(Signatures.sha256Digest(streamer::data)))
.data(streamer),
"{\"message\":\"Application package version: 1.0.45-d00d, source revision of repository 'repo', branch 'master' with commit 'd00d', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\"}");
ApplicationId app1 = ApplicationId.from("tenant1", "application1", "instance1");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/jobreport", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(asJson(DeploymentJobs.JobReport.ofComponent(app1,
1234,
123,
Optional.empty(),
BuildJob.defaultSourceRevision))),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"" + app1 + " is set up to be deployed from internally," +
" and no longer accepts submissions from Screwdriver v3 jobs. If you need to revert " +
"to the old pipeline, please file a ticket at yo/vespa-support and request this.\"}",
400);
assertEquals(2, tester.controller().applications().deploymentTrigger().triggerReadyJobs());
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job", GET)
.userIdentity(USER_ID),
new File("jobs.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/system-test", GET)
.userIdentity(USER_ID),
new File("system-test-job.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/system-test/run/1", GET)
.userIdentity(USER_ID),
new File("system-test-details.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/staging-test", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Aborting run 1 of staging-test for tenant1.application1.instance1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", DELETE)
.userIdentity(HOSTED_VESPA_OPERATOR),
"{\"message\":\"Unregistered 'tenant1.application1.instance1' from internal deployment pipeline.\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/jobreport", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(asJson(DeploymentJobs.JobReport.ofComponent(app1,
1234,
123,
Optional.empty(),
BuildJob.defaultSourceRevision))),
"{\"message\":\"ok\"}");
byte[] data = new byte[0];
tester.assertResponse(request("/application/v4/user?user=new_user&domain=by", PUT)
.data(data)
.userIdentity(new UserId("new_user")),
new File("create-user-response.json"));
tester.assertResponse(request("/application/v4/user", GET)
.userIdentity(new UserId("other_user")),
"{\"user\":\"other_user\",\"tenants\":[],\"tenantExists\":false}");
tester.assertResponse(request("/application/v4/", Request.Method.OPTIONS)
.userIdentity(USER_ID),
"");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", DELETE).userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
"{\"message\":\"Deleted instance tenant1.application1.instance1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE).userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("tenant-without-applications.json"));
}
/**
 * Stores fixed deployment- and ownership-issue ids, and an owner, on the given
 * application, for tests that inspect issue data in responses.
 */
private void addIssues(ContainerControllerTester tester, TenantAndApplicationId id) {
    tester.controller().applications().lockApplicationOrThrow(id, locked ->
            tester.controller().applications().store(locked.withOwner(User.from("owner-username"))
                                                           .withDeploymentIssueId(IssueId.from("123"))
                                                           .withOwnershipIssueId(IssueId.from("321"))));
}
@Test
// Exercises the global-rotation status endpoint and its override
// (GET/PUT/DELETE) for an instance with a global service id, deployed to
// us-west-1 but not us-east-3.
public void testRotationOverride() {
    tester.computeVersionStatus();
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    // Package declares a global service id over two prod regions.
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .instances("instance1")
            .globalServiceId("foo")
            .region("us-west-1")
            .region("us-east-3")
            .build();
    ApplicationId id = createTenantAndApplication();
    long projectId = 1;
    MultiPartStreamer deployData = createApplicationDeployData(Optional.of(applicationPackage), false);
    startAndTestChange(controllerTester, id, projectId, applicationPackage, deployData, 100);
    // Deploy to us-west-1 only, and report the production job as completed.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/deploy", POST)
                                  .data(deployData)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          new File("deploy-result.json"));
    controllerTester.jobCompletion(JobType.productionUsWest1)
                    .application(id)
                    .projectId(projectId)
                    .submit();
    setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "us-west-1"));
    // Global rotation status for an unknown application is a 400.
    tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2/environment/prod/region/us-west-1/instance/default/global-rotation", GET)
                                  .userIdentity(USER_ID),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"tenant2.application2 not found\"}",
                          400);
    // Status and override for a region without a deployment are 404s.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/global-rotation", GET)
                                  .userIdentity(USER_ID),
                          "{\"error-code\":\"NOT_FOUND\",\"message\":\"application 'tenant1.application1.instance1' has no deployment in prod.us-east-3\"}",
                          404);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/global-rotation/override", PUT)
                                  .userIdentity(USER_ID)
                                  .data("{\"reason\":\"unit-test\"}"),
                          "{\"error-code\":\"NOT_FOUND\",\"message\":\"application 'tenant1.application1.instance1' has no deployment in prod.us-east-3\"}",
                          404);
    setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "us-west-1"));
    // The deployed region reports its rotation status, and the override can be
    // read, set and cleared.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation", GET)
                                  .userIdentity(USER_ID),
                          new File("global-rotation.json"));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override", GET)
                                  .userIdentity(USER_ID),
                          new File("global-rotation-get.json"));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override", PUT)
                                  .userIdentity(USER_ID)
                                  .data("{\"reason\":\"unit-test\"}"),
                          new File("global-rotation-put.json"));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override", DELETE)
                                  .userIdentity(USER_ID)
                                  .data("{\"reason\":\"unit-test\"}"),
                          new File("global-rotation-delete.json"));
}
/**
 * Direct deployment (bypassing the deployment pipeline):
 * a screwdriver identity with deploy rights can deploy an application package
 * straight to a prod zone; system applications cannot be deployed while the
 * system itself is upgrading, but can afterwards by a hosted operator.
 */
@Test  // Fix: the original declared @Test twice, which is a compile error (non-repeatable annotation).
public void testDeployDirectly() {
    tester.computeVersionStatus();
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    addUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));
    // Create tenant and application instance.
    tester.assertResponse(request("/application/v4/tenant/tenant1", POST).userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT),
                          new File("tenant-without-applications.json"));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT),
                          new File("application-reference.json"));
    // Grant the screwdriver project deploy rights on the application.
    addScrewdriverUserToDeployRole(SCREWDRIVER_ID,
                                   ATHENZ_TENANT_DOMAIN,
                                   new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId("application1"));
    // Deploy an application package directly to a prod zone.
    MultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/deploy", POST)
                                  .data(entity)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          new File("deploy-result.json"));
    // Deploying a system application while the system upgrades is rejected ...
    MultiPartStreamer noAppEntity = createApplicationDeployData(Optional.empty(), true);
    tester.assertResponse(request("/application/v4/tenant/hosted-vespa/application/routing/environment/prod/region/us-central-1/instance/default/deploy", POST)
                                  .data(noAppEntity)
                                  .userIdentity(HOSTED_VESPA_OPERATOR),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Deployment of system applications during a system upgrade is not allowed\"}",
                          400);
    // ... but succeeds once the system upgrade has completed.
    tester.upgradeSystem(tester.controller().versionStatus().controllerVersion().get().versionNumber());
    tester.assertResponse(request("/application/v4/tenant/hosted-vespa/application/routing/environment/prod/region/us-central-1/instance/default/deploy", POST)
                                  .data(noAppEntity)
                                  .userIdentity(HOSTED_VESPA_OPERATOR),
                          new File("deploy-result.json"));
    // Deploying proxy-host without an application package fails with 400 (see deploy-no-deployment.json).
    tester.assertResponse(request("/application/v4/tenant/hosted-vespa/application/proxy-host/environment/prod/region/us-central-1/instance/instance1/deploy", POST)
                                  .data(noAppEntity)
                                  .userIdentity(HOSTED_VESPA_OPERATOR),
                          new File("deploy-no-deployment.json"), 400);
}
@Test
// Builds up deployments in several regions, in mixed order, then asserts
// against a fixture in which deployments and jobs appear sorted.
public void testSortsDeploymentsAndJobs() {
    tester.computeVersionStatus();
    // First deployment: us-east-3 only.
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .instances("instance1")
            .region("us-east-3")
            .build();
    ApplicationId id = createTenantAndApplication();
    long projectId = 1;
    MultiPartStreamer deployData = createApplicationDeployData(Optional.empty(), false);
    startAndTestChange(controllerTester, id, projectId, applicationPackage, deployData, 100);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/deploy", POST)
                                  .data(deployData)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          new File("deploy-result.json"));
    controllerTester.jobCompletion(JobType.productionUsEast3)
                    .application(id)
                    .projectId(projectId)
                    .submit();
    // Second change: the package now adds us-west-1 and a global service id.
    applicationPackage = new ApplicationPackageBuilder()
            .instances("instance1")
            .globalServiceId("foo")
            .region("us-west-1")
            .region("us-east-3")
            .build();
    startAndTestChange(controllerTester, id, projectId, applicationPackage, deployData, 101);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/deploy", POST)
                                  .data(deployData)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          new File("deploy-result.json"));
    controllerTester.jobCompletion(JobType.productionUsWest1)
                    .application(id)
                    .projectId(projectId)
                    .submit();
    setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "us-west-1"));
    // Complete the second region of the change as well.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/deploy", POST)
                                  .data(deployData)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          new File("deploy-result.json"));
    controllerTester.jobCompletion(JobType.productionUsEast3)
                    .application(id)
                    .projectId(projectId)
                    .submit();
    setDeploymentMaintainedInfo(controllerTester);
    // The instance response lists the multiple deployments and jobs (sorted; see fixture).
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", GET)
                                  .userIdentity(USER_ID),
                          new File("application-without-change-multiple-deployments.json"));
}
@Test
// The metering endpoint reflects whatever the metering service reports for a
// tenant/application, independently of whether it exists in the controller.
public void testMeteringResponses() {
    MockMeteringClient metering = (MockMeteringClient) controllerTester.containerTester().serviceRegistry().meteringService();
    ApplicationId id = ApplicationId.from("doesnotexist", "doesnotexist", "default");
    // Three historical snapshots, plus current usage and monthly aggregates.
    List<ResourceSnapshot> snapshots = List.of(
            new ResourceSnapshot(id, 1, 2, 3, Instant.ofEpochMilli(123)),
            new ResourceSnapshot(id, 1, 2, 3, Instant.ofEpochMilli(246)),
            new ResourceSnapshot(id, 1, 2, 3, Instant.ofEpochMilli(492)));
    ResourceAllocation current = new ResourceAllocation(1, 2, 3);
    ResourceAllocation monthToDate = new ResourceAllocation(12, 24, 1000);
    ResourceAllocation previousMonth = new ResourceAllocation(24, 48, 2000);
    metering.setMeteringInfo(new MeteringInfo(monthToDate, previousMonth, current, Map.of(id, snapshots)));
    tester.assertResponse(request("/application/v4/tenant/doesnotexist/application/doesnotexist/metering", GET)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT),
                          new File("application1-metering.json"));
}
@Test
// Covers error responses from the tenant/application CRUD and deploy
// endpoints: access control, not-found, duplicates, invalid names, and
// config-server failures during prepare/activate.
public void testErrorResponses() throws Exception {
    tester.computeVersionStatus();
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    // Updating a tenant that does not yet exist is denied.
    tester.assertResponse(request("/application/v4/tenant/tenant1", PUT)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
                          "{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
                          403);
    // Reads of non-existing tenant, application and deployment are 404s.
    tester.assertResponse(request("/application/v4/tenant/tenant1", GET)
                                  .userIdentity(USER_ID),
                          "{\"error-code\":\"NOT_FOUND\",\"message\":\"Tenant 'tenant1' does not exist\"}",
                          404);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", GET)
                                  .userIdentity(USER_ID),
                          "{\"error-code\":\"NOT_FOUND\",\"message\":\"tenant1.application1 not found\"}",
                          404);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-east/instance/default", GET)
                                  .userIdentity(USER_ID),
                          "{\"error-code\":\"NOT_FOUND\",\"message\":\"tenant1.application1 not found\"}",
                          404);
    // Creating the tenant succeeds.
    tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
                                  .userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT),
                          new File("tenant-without-applications.json"));
    // A second tenant cannot reuse the same Athenz domain.
    tester.assertResponse(request("/application/v4/tenant/tenant2", POST)
                                  .userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Could not create tenant 'tenant2': The Athens domain 'domain1' is already connected to tenant 'tenant1'\"}",
                          400);
    // Creating an existing tenant again fails.
    tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Tenant 'tenant1' already exists\"}",
                          400);
    // Invalid tenant names: underscores, the reserved 'by-' prefix, and the system tenant.
    tester.assertResponse(request("/application/v4/tenant/my_tenant_2", POST)
                                  .userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"New tenant or application names must start with a letter, may contain no more than 20 characters, and may only contain lowercase letters, digits or dashes, but no double-dashes.\"}",
                          400);
    tester.assertResponse(request("/application/v4/tenant/by-tenant2", POST)
                                  .userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Athenz tenant name cannot have prefix 'by-'\"}",
                          400);
    tester.assertResponse(request("/application/v4/tenant/hosted-vespa", POST)
                                  .userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Tenant 'hosted-vespa' already exists\"}",
                          400);
    // Creating the application instance succeeds once, then fails as duplicate.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT),
                          new File("application-reference.json"));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
                                  .oktaAccessToken(OKTA_AT)
                                  .userIdentity(USER_ID),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Could not create 'tenant1.application1.instance1': Application already exists\"}",
                          400);
    // Config-server failures during prepare/activate map to the expected HTTP statuses.
    ConfigServerMock configServer = serviceRegistry().configServerMock();
    configServer.throwOnNextPrepare(new ConfigServerException(new URI("server-url"), "Failed to prepare application", ConfigServerException.ErrorCode.INVALID_APPLICATION_PACKAGE, null));
    MultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
                                  .data(entity)
                                  .userIdentity(USER_ID),
                          new File("deploy-failure.json"), 400);
    configServer.throwOnNextPrepare(new ConfigServerException(new URI("server-url"), "Failed to prepare application", ConfigServerException.ErrorCode.OUT_OF_CAPACITY, null));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
                                  .data(entity)
                                  .userIdentity(USER_ID),
                          new File("deploy-out-of-capacity.json"), 400);
    configServer.throwOnNextPrepare(new ConfigServerException(new URI("server-url"), "Failed to activate application", ConfigServerException.ErrorCode.ACTIVATION_CONFLICT, null));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
                                  .data(entity)
                                  .userIdentity(USER_ID),
                          new File("deploy-activation-conflict.json"), 409);
    configServer.throwOnNextPrepare(new ConfigServerException(new URI("server-url"), "Internal server error", ConfigServerException.ErrorCode.INTERNAL_SERVER_ERROR, null));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
                                  .data(entity)
                                  .userIdentity(USER_ID),
                          new File("deploy-internal-server-error.json"), 500);
    // A tenant with active applications cannot be deleted.
    tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Could not delete tenant 'tenant1': This tenant has active applications\"}",
                          400);
    // Deleting the instance succeeds once, then fails as not found.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", DELETE)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT),
                          "{\"message\":\"Deleted instance tenant1.application1.instance1\"}");
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", DELETE)
                                  .oktaAccessToken(OKTA_AT)
                                  .userIdentity(USER_ID),
                          "{\"error-code\":\"NOT_FOUND\",\"message\":\"Could not delete application 'tenant1.application1.instance1': Application not found\"}",
                          404);
    // Deleting the (now-empty) tenant succeeds; without an Okta token it is denied.
    tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT),
                          new File("tenant-without-applications.json"));
    tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE)
                                  .userIdentity(USER_ID),
                          "{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
                          403);
    // A stored tenant with an underscore collides with the dashed form of the name.
    tester.controller().curator().writeTenant(new AthenzTenant(TenantName.from("my_tenant"), ATHENZ_TENANT_DOMAIN,
                                                               new Property("property1"), Optional.empty(), Optional.empty()));
    tester.assertResponse(request("/application/v4/tenant/my-tenant", POST)
                                  .userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Tenant 'my-tenant' already exists\"}",
                          400);
}
@Test
// Verifies authentication and authorization on tenant and application
// operations: unauthenticated, unauthorized and authorized users.
public void testAuthorization() {
    UserId authorizedUser = USER_ID;
    UserId unauthorizedUser = new UserId("othertenant");
    // Mutation without any identity fails with 401.
    tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
                          "{\n  \"message\" : \"Not authenticated\"\n}",
                          401);
    // Read access does not require tenant authorization.
    tester.assertResponse(request("/application/v4/tenant/", GET)
                                  .userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
                          "[]",
                          200);
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    // Only an admin of the Athenz domain may create the tenant.
    tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT)
                                  .userIdentity(unauthorizedUser),
                          "{\"error-code\":\"FORBIDDEN\",\"message\":\"The user 'user.othertenant' is not admin in Athenz domain 'domain1'\"}",
                          403);
    tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .userIdentity(authorizedUser)
                                  .oktaAccessToken(OKTA_AT),
                          new File("tenant-without-applications.json"),
                          200);
    // Application creation requires tenant admin rights.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
                                  .userIdentity(unauthorizedUser)
                                  .oktaAccessToken(OKTA_AT),
                          "{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
                          403);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
                                  .userIdentity(authorizedUser)
                                  .oktaAccessToken(OKTA_AT),
                          new File("application-reference.json"),
                          200);
    // Deploying to prod as a plain user is denied.
    MultiPartStreamer entity = createApplicationDeployData(applicationPackageDefault, true);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-west-1/instance/default/deploy", POST)
                                  .data(entity)
                                  .userIdentity(USER_ID),
                          "{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
                          403);
    // Deletion requires tenant admin rights.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", DELETE)
                                  .userIdentity(unauthorizedUser),
                          "{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
                          403);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/default", POST)
                                  .userIdentity(authorizedUser)
                                  .oktaAccessToken(OKTA_AT),
                          new File("application-reference-default.json"),
                          200);
    // An application with several instances cannot be deleted wholesale.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", DELETE)
                                  .userIdentity(authorizedUser)
                                  .oktaAccessToken(OKTA_AT),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Could not delete application; more than one instance present: [tenant1.application1, tenant1.application1.instance1]\"}",
                          400);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/default", DELETE)
                                  .userIdentity(authorizedUser)
                                  .oktaAccessToken(OKTA_AT),
                          "{\"message\":\"Deleted instance tenant1.application1.default\"}",
                          200);
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", DELETE)
                                  .userIdentity(authorizedUser)
                                  .oktaAccessToken(OKTA_AT),
                          "{\"message\":\"Deleted application tenant1.application1\"}",
                          200);
    // Tenant updates likewise require admin rights in the (new) domain.
    tester.assertResponse(request("/application/v4/tenant/tenant1", PUT)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .userIdentity(unauthorizedUser),
                          "{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
                          403);
    createAthenzDomainWithAdmin(new AthenzDomain("domain2"), USER_ID);
    tester.assertResponse(request("/application/v4/tenant/tenant1", PUT)
                                  .data("{\"athensDomain\":\"domain2\", \"property\":\"property1\"}")
                                  .userIdentity(authorizedUser)
                                  .oktaAccessToken(OKTA_AT),
                          "{\"tenant\":\"tenant1\",\"type\":\"ATHENS\",\"athensDomain\":\"domain2\",\"property\":\"property1\",\"applications\":[]}",
                          200);
    tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE)
                                  .userIdentity(unauthorizedUser),
                          "{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
                          403);
}
@Test
// Deployment is rejected when deployment.xml declares an Athenz domain that
// differs from the tenant's own domain.
public void deployment_fails_on_illegal_domain_in_deployment_spec() {
    long projectId = 123;
    ScrewdriverId sdId = new ScrewdriverId(Long.toString(projectId));
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(new AthenzDomain("another.domain"), "service"), true);
    // The package declares its identity in "another.domain" — not the tenant domain.
    ApplicationPackage pkg = new ApplicationPackageBuilder()
            .upgradePolicy("default")
            .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("another.domain"), com.yahoo.config.provision.AthenzService.from("service"))
            .environment(Environment.prod)
            .region("us-west-1")
            .build();
    Application application = controllerTester.createApplication(ATHENZ_TENANT_DOMAIN.getName(), "tenant1", "application1", "default");
    controllerTester.authorize(ATHENZ_TENANT_DOMAIN, sdId, ApplicationAction.deploy, application.id());
    controllerTester.jobCompletion(JobType.component)
                    .application(application)
                    .projectId(projectId)
                    .uploadArtifact(pkg)
                    .submit();
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
                                  .data(createApplicationDeployData(pkg, false))
                                  .screwdriverIdentity(sdId),
                          "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Athenz domain in deployment.xml: [another.domain] must match tenant domain: [domain1]\"}",
                          400);
}
@Test
// Counterpart to the illegal-domain test: deployment succeeds when the Athenz
// domain in deployment.xml matches the tenant's domain.
public void deployment_succeeds_when_correct_domain_is_used() {
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .upgradePolicy("default")
            .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
            .environment(Environment.prod)
            .region("us-west-1")
            .build();
    long screwdriverProjectId = 123;
    ScrewdriverId screwdriverId = new ScrewdriverId(Long.toString(screwdriverProjectId));
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    // The identity service is allowed to be launched (last argument true).
    configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
    Application application = controllerTester.createApplication(ATHENZ_TENANT_DOMAIN.getName(), "tenant1", "application1", "default");
    controllerTester.authorize(ATHENZ_TENANT_DOMAIN, screwdriverId, ApplicationAction.deploy, application.id());
    controllerTester.jobCompletion(JobType.component)
                    .application(application)
                    .projectId(screwdriverProjectId)
                    .uploadArtifact(applicationPackage)
                    .submit();
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
                                  .data(createApplicationDeployData(applicationPackage, false))
                                  .screwdriverIdentity(screwdriverId),
                          new File("deploy-result.json"));
}
@Test
// A user who is NOT admin of the declared Athenz domain may not deploy to
// their personal (by-) tenant with that domain's identity.
public void deployment_fails_for_personal_tenants_when_athenzdomain_specified_and_user_not_admin() {
    tester.computeVersionStatus();
    UserId tenantAdmin = new UserId("tenant-admin");
    UserId userId = new UserId("new-user");
    // Only tenant-admin is admin of domain1; new-user is not.
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, tenantAdmin);
    configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
    // Create the user (and the personal by- tenant) with an empty body.
    byte[] data = new byte[0];
    tester.assertResponse(request("/application/v4/user?user=new_user&domain=by", PUT)
                                  .data(data)
                                  .userIdentity(userId),
                          new File("create-user-response.json"));
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .upgradePolicy("default")
            .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
            .environment(Environment.dev)
            .region("us-west-1")
            .build();
    String expectedResult="{\"error-code\":\"BAD_REQUEST\",\"message\":\"User user.new-user is not allowed to launch services in Athenz domain domain1. Please reach out to the domain admin.\"}";
    MultiPartStreamer entity = createApplicationDeployData(applicationPackage, true);
    tester.assertResponse(request("/application/v4/tenant/by-new-user/application/application1/environment/dev/region/us-west-1/instance/default", POST)
                                  .data(entity)
                                  .userIdentity(userId),
                          expectedResult,
                          400);
}
@Test
// A user who IS admin of the declared Athenz domain may deploy to their
// personal (by-) tenant with that domain's identity.
public void deployment_succeeds_for_personal_tenants_when_user_is_tenant_admin() {
    tester.computeVersionStatus();
    UserId admin = new UserId("new_user");
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, admin);
    configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
    // Create the user (and the personal by-new-user tenant) with an empty body.
    tester.assertResponse(request("/application/v4/user?user=new_user&domain=by", PUT)
                                  .data(new byte[0])
                                  .userIdentity(admin),
                          new File("create-user-response.json"));
    ApplicationPackage pkg = new ApplicationPackageBuilder()
            .upgradePolicy("default")
            .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
            .environment(Environment.dev)
            .region("us-west-1")
            .build();
    tester.assertResponse(request("/application/v4/tenant/by-new-user/application/application1/environment/dev/region/us-west-1/instance/default", POST)
                                  .data(createApplicationDeployData(pkg, true))
                                  .userIdentity(admin),
                          new File("deploy-result.json"));
}
@Test
// Deployment fails when the declared Athenz service is configured as not
// launchable (configureAthenzIdentity(..., false)).
public void deployment_fails_when_athenz_service_cannot_be_launched() {
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .upgradePolicy("default")
            .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
            .environment(Environment.prod)
            .region("us-west-1")
            .build();
    long screwdriverProjectId = 123;
    ScrewdriverId screwdriverId = new ScrewdriverId(Long.toString(screwdriverProjectId));
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    // The service exists but launching it is NOT allowed.
    configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), false);
    Application application = controllerTester.createApplication(ATHENZ_TENANT_DOMAIN.getName(), "tenant1", "application1", "default");
    controllerTester.authorize(ATHENZ_TENANT_DOMAIN, screwdriverId, ApplicationAction.deploy, application.id());
    controllerTester.jobCompletion(JobType.component)
                    .application(application)
                    .projectId(screwdriverProjectId)
                    .uploadArtifact(applicationPackage)
                    .submit();
    String expectedResult="{\"error-code\":\"BAD_REQUEST\",\"message\":\"Not allowed to launch Athenz service domain1.service\"}";
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
                                  .data(createApplicationDeployData(applicationPackage, false))
                                  .screwdriverIdentity(screwdriverId),
                          expectedResult,
                          400);
}
@Test
// After a normal deployment, a hosted operator can redeploy the same zone
// without supplying an application package or versions.
public void redeployment_succeeds_when_not_specifying_versions_or_application_package() {
    addUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));
    tester.computeVersionStatus();
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .upgradePolicy("default")
            .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
            .environment(Environment.prod)
            .region("us-west-1")
            .build();
    long screwdriverProjectId = 123;
    ScrewdriverId screwdriverId = new ScrewdriverId(Long.toString(screwdriverProjectId));
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
    Application application = controllerTester.createApplication(ATHENZ_TENANT_DOMAIN.getName(), "tenant1", "application1", "default");
    controllerTester.authorize(ATHENZ_TENANT_DOMAIN, screwdriverId, ApplicationAction.deploy, application.id());
    controllerTester.jobCompletion(JobType.component)
                    .application(application)
                    .projectId(screwdriverProjectId)
                    .uploadArtifact(applicationPackage)
                    .submit();
    // First deployment carries the application package.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
                                  .data(createApplicationDeployData(applicationPackage, false))
                                  .screwdriverIdentity(screwdriverId),
                          new File("deploy-result.json"));
    // Redeployment by a hosted operator works with an empty package.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
                                  .data(createApplicationDeployData(Optional.empty(), true))
                                  .userIdentity(HOSTED_VESPA_OPERATOR),
                          new File("deploy-result.json"));
}
@Test
// Job-report handling: reports for jobs that were never triggered are
// rejected, while completed jobs are recorded on the instance's job status.
public void testJobStatusReporting() {
    addUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));
    tester.computeVersionStatus();
    long projectId = 1;
    Application app = controllerTester.createApplication();
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .environment(Environment.prod)
            .region("us-central-1")
            .build();
    // Expected platform version of the completed component run
    // (assumes this matches the tester's default version — TODO confirm).
    Version vespaVersion = new Version("6.1");
    BuildJob job = new BuildJob(report -> notifyCompletion(report, controllerTester), controllerTester.containerTester().serviceRegistry().artifactRepositoryMock())
            .application(app)
            .projectId(projectId);
    job.type(JobType.component).uploadArtifact(applicationPackage).submit();
    controllerTester.deploy(app.id().defaultInstance(), applicationPackage, TEST_ZONE);
    job.type(JobType.systemTest).submit();
    // A second system-test report is rejected: the run was already completed.
    Request request = request("/application/v4/tenant/tenant1/application/application1/jobreport", POST)
            .data(asJson(job.type(JobType.systemTest).report()))
            .userIdentity(HOSTED_VESPA_OPERATOR)
            .get();
    tester.assertResponse(request, "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Notified of completion " +
                                   "of system-test for tenant1.application1, but that has not been triggered; last was " +
                                   controllerTester.controller().applications().requireInstance(app.id().defaultInstance()).deploymentJobs().jobStatus().get(JobType.systemTest).lastTriggered().get().at() + "\"}", 400);
    // A report for a job that was never triggered at all is also rejected.
    request = request("/application/v4/tenant/tenant1/application/application1/jobreport", POST)
            .data(asJson(job.type(JobType.productionUsEast3).report()))
            .userIdentity(HOSTED_VESPA_OPERATOR)
            .get();
    tester.assertResponse(request, "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Notified of completion " +
                                   "of production-us-east-3 for tenant1.application1, but that has not been triggered; last was never\"}",
                          400);
    // The component job completion was recorded as a success on the instance.
    JobStatus recordedStatus =
            tester.controller().applications().getInstance(app.id().defaultInstance()).get().deploymentJobs().jobStatus().get(JobType.component);
    assertNotNull("Status was recorded", recordedStatus);
    assertTrue(recordedStatus.isSuccess());
    assertEquals(vespaVersion, recordedStatus.lastCompleted().get().platform());
    // Jobs that were never triggered have no recorded status.
    recordedStatus =
            tester.controller().applications().getInstance(app.id().defaultInstance()).get().deploymentJobs().jobStatus().get(JobType.productionApNortheast2);
    assertNull("Status of never-triggered jobs is empty", recordedStatus);
    assertTrue("All jobs have been run", tester.controller().applications().deploymentTrigger().jobsToRun().isEmpty());
}
@Test
// A job reported with an out-of-capacity error is stored as a failed run
// carrying that error on the job's status.
public void testJobStatusReportingOutOfCapacity() {
    controllerTester.containerTester().computeVersionStatus();
    Application app = controllerTester.createApplication();
    long projectId = 1;
    ApplicationPackage pkg = new ApplicationPackageBuilder()
            .environment(Environment.prod)
            .region("us-central-1")
            .build();
    BuildJob buildJob = new BuildJob(report -> notifyCompletion(report, controllerTester), controllerTester.containerTester().serviceRegistry().artifactRepositoryMock())
            .application(app)
            .projectId(projectId);
    buildJob.type(JobType.component).uploadArtifact(pkg).submit();
    // Run system test normally, then report staging test as out of capacity.
    controllerTester.deploy(app.id().defaultInstance(), pkg, TEST_ZONE);
    buildJob.type(JobType.systemTest).submit();
    controllerTester.deploy(app.id().defaultInstance(), pkg, STAGING_ZONE);
    buildJob.type(JobType.stagingTest).error(DeploymentJobs.JobError.outOfCapacity).submit();
    JobStatus status = tester.controller().applications().getInstance(app.id().defaultInstance()).get()
                             .deploymentJobs()
                             .jobStatus()
                             .get(JobType.stagingTest);
    assertFalse(status.isSuccess());
    assertEquals(DeploymentJobs.JobError.outOfCapacity, status.jobError().get());
}
@Test
// Routing policies stored for an instance show up in both the application
// response and the deployment response.
public void applicationWithRoutingPolicy() {
    Application app = controllerTester.createApplication();
    ApplicationPackage pkg = new ApplicationPackageBuilder()
            .environment(Environment.prod)
            .region("us-west-1")
            .build();
    controllerTester.deployCompletely(app, pkg, 1, false);
    // Store one routing policy for the us-west-1 deployment.
    ZoneId westZone = ZoneId.from(Environment.prod, RegionName.from("us-west-1"));
    RoutingPolicy policy = new RoutingPolicy(app.id().defaultInstance(),
                                             ClusterSpec.Id.from("default"),
                                             westZone,
                                             HostName.from("lb-0-canonical-name"),
                                             Optional.of("dns-zone-1"), Set.of(EndpointId.of("c0")));
    tester.controller().curator().writeRoutingPolicies(app.id().defaultInstance(), Set.of(policy));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", GET)
                                  .userIdentity(USER_ID),
                          new File("application-with-routing-policy.json"));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-west-1/instance/default", GET)
                                  .userIdentity(USER_ID),
                          new File("deployment-with-routing-policy.json"));
}
/** Posts the given job report to the job-report API as an operator, then triggers any jobs that became ready. */
private void notifyCompletion(DeploymentJobs.JobReport report, ContainerControllerTester target) {
    Request reportRequest = request("/application/v4/tenant/tenant1/application/application1/jobreport", POST)
            .userIdentity(HOSTED_VESPA_OPERATOR)
            .data(asJson(report))
            .get();
    assertResponse(reportRequest, 200, "{\"message\":\"ok\"}");
    target.controller().applications().deploymentTrigger().triggerReadyJobs();
}
/**
 * Serializes a job report to the JSON payload expected by the job-report API.
 * The source revision object is included only when the report carries a version with a source.
 */
private static byte[] asJson(DeploymentJobs.JobReport report) {
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    root.setLong("projectId", report.projectId());
    root.setString("jobName", report.jobType().jobName());
    root.setLong("buildNumber", report.buildNumber());
    report.jobError().ifPresent(error -> root.setString("jobError", error.name()));
    report.version().flatMap(ApplicationVersion::source).ifPresent(source -> {
        Cursor revision = root.setObject("sourceRevision");
        revision.setString("repository", source.repository());
        revision.setString("branch", source.branch());
        revision.setString("commit", source.commit());
    });
    root.setString("tenant", report.applicationId().tenant().value());
    root.setString("application", report.applicationId().application().value());
    root.setString("instance", report.applicationId().instance().value());
    try {
        return SlimeUtils.toJsonBytes(slime);
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
// Convenience overload: wraps the given package in an Optional and delegates.
private MultiPartStreamer createApplicationDeployData(ApplicationPackage applicationPackage, boolean deployDirectly) {
    return createApplicationDeployData(Optional.of(applicationPackage), deployDirectly);
}
// Convenience overload: delegates with no explicit application version.
private MultiPartStreamer createApplicationDeployData(Optional<ApplicationPackage> applicationPackage, boolean deployDirectly) {
    return createApplicationDeployData(applicationPackage, Optional.empty(), deployDirectly);
}
/**
 * Builds the multipart deploy request body: a "deployOptions" JSON part, plus the zipped
 * application package under "applicationZip" when one is given.
 */
private MultiPartStreamer createApplicationDeployData(Optional<ApplicationPackage> applicationPackage,
                                                      Optional<ApplicationVersion> applicationVersion, boolean deployDirectly) {
    MultiPartStreamer data = new MultiPartStreamer();
    data.addJson("deployOptions", deployOptions(deployDirectly, applicationVersion));
    applicationPackage.ifPresent(pkg -> data.addBytes("applicationZip", pkg.zippedContent()));
    return data;
}
/**
 * Builds the multipart submission body: submit options JSON, the zipped application package,
 * and a dummy test package.
 *
 * Fix: the dummy test bytes previously used {@code String.getBytes()} with the platform default
 * charset; made UTF-8 explicit (consistent with the rest of this class, e.g. RequestBuilder).
 */
private MultiPartStreamer createApplicationSubmissionData(ApplicationPackage applicationPackage) {
    return new MultiPartStreamer().addJson(EnvironmentResource.SUBMIT_OPTIONS, "{\"repository\":\"repo\",\"branch\":\"master\",\"commit\":\"d00d\",\"authorEmail\":\"a@b\"}")
                                  .addBytes(EnvironmentResource.APPLICATION_ZIP, applicationPackage.zippedContent())
                                  .addBytes(EnvironmentResource.APPLICATION_TEST_ZIP, "content".getBytes(StandardCharsets.UTF_8));
}
/**
 * Renders the "deployOptions" JSON part of a deploy request. When an application version is
 * given, its build number and source revision are appended to the object.
 */
private String deployOptions(boolean deployDirectly, Optional<ApplicationVersion> applicationVersion) {
    StringBuilder json = new StringBuilder();
    json.append("{\"vespaVersion\":null,")
        .append("\"ignoreValidationErrors\":false,")
        .append("\"deployDirectly\":").append(deployDirectly);
    applicationVersion.ifPresent(version -> json.append(",")
                                                .append("\"buildNumber\":").append(version.buildNumber().getAsLong()).append(",")
                                                .append("\"sourceRevision\":{")
                                                .append("\"repository\":\"").append(version.source().get().repository()).append("\",")
                                                .append("\"branch\":\"").append(version.source().get().branch()).append("\",")
                                                .append("\"commit\":\"").append(version.source().get().commit()).append("\"")
                                                .append("}"));
    return json.append("}").toString();
}
/** Creates a request builder for the given path and method; identity, token and payload are added by the builder. */
private RequestBuilder request(String path, Request.Method method) {
    return new RequestBuilder(path, method);
}
/**
 * In production this happens outside hosted Vespa, so there is no API for it and we need to reach down into the
 * mock setup to replicate the action.
 */
private void createAthenzDomainWithAdmin(AthenzDomain domain, UserId userId) {
    AthenzClientFactoryMock factory = (AthenzClientFactoryMock) container.components()
                                                                         .getComponent(AthenzClientFactoryMock.class.getName());
    AthenzDbMock.Domain mockDomain = factory.getSetup().getOrCreateDomain(domain);
    mockDomain.markAsVespaTenant();
    mockDomain.admin(AthenzUser.fromUserId(userId.id()));
}
/**
 * Mock athenz service identity configuration. Simulates that configserver is allowed to launch a service.
 */
private void configureAthenzIdentity(com.yahoo.vespa.athenz.api.AthenzService service, boolean allowLaunch) {
    AthenzClientFactoryMock factory = (AthenzClientFactoryMock) container.components()
                                                                         .getComponent(AthenzClientFactoryMock.class.getName());
    AthenzDbMock.Domain mockDomain = factory.getSetup().domains.computeIfAbsent(service.getDomain(), AthenzDbMock.Domain::new);
    mockDomain.services.put(service.getName(), new AthenzDbMock.Service(allowLaunch));
}
/**
 * In production this happens outside hosted Vespa, so there is no API for it and we need to reach down into the
 * mock setup to replicate the action.
 */
private void addScrewdriverUserToDeployRole(ScrewdriverId screwdriverId,
                                            AthenzDomain domain,
                                            com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId applicationId) {
    AthenzClientFactoryMock factory = (AthenzClientFactoryMock) container.components()
                                                                         .getComponent(AthenzClientFactoryMock.class.getName());
    AthenzIdentity deployer = HostedAthenzIdentities.from(screwdriverId);
    AthenzDbMock.Application mockApplication = factory.getSetup().domains.get(domain).applications.get(applicationId);
    mockApplication.addRoleMember(ApplicationAction.deploy, deployer);
}
/**
 * Creates tenant1 and application1/instance1 through the API, grants the Screwdriver identity
 * the deploy role for the application, and returns the resulting application id.
 */
private ApplicationId createTenantAndApplication() {
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    // Create the tenant, backed by the Athenz domain created above.
    tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
                                  .userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT),
                          new File("tenant-without-applications.json"));
    // Create the application instance under the new tenant.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT),
                          new File("application-reference.json"));
    // Allow the Screwdriver identity to deploy this application.
    addScrewdriverUserToDeployRole(SCREWDRIVER_ID, ATHENZ_TENANT_DOMAIN,
                                   new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId("application1"));
    return ApplicationId.from("tenant1", "application1", "instance1");
}
/**
 * Submits a component build for the given application and runs it through the test pipeline:
 * deploy, deactivate and report completion for the system test zone, then for the staging zone.
 */
private void startAndTestChange(ContainerControllerTester controllerTester, ApplicationId application,
                                long projectId, ApplicationPackage applicationPackage,
                                MultiPartStreamer deployData, long buildNumber) {
    ContainerTester tester = controllerTester.containerTester();
    // Make the built artifact available, then report completion of the component (build) job.
    controllerTester.containerTester().serviceRegistry().artifactRepositoryMock()
                    .put(application, applicationPackage,"1.0." + buildNumber + "-commit1");
    controllerTester.jobCompletion(JobType.component)
                    .application(application)
                    .projectId(projectId)
                    .buildNumber(buildNumber)
                    .submit();
    // Deploy, deactivate and report completion in test.us-east-1.
    String testPath = String.format("/application/v4/tenant/%s/application/%s/instance/%s/environment/test/region/us-east-1",
                                    application.tenant().value(), application.application().value(), application.instance().value());
    tester.assertResponse(request(testPath, POST)
                                  .data(deployData)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          new File("deploy-result.json"));
    tester.assertResponse(request(testPath, DELETE)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          "{\"message\":\"Deactivated " + application + " in test.us-east-1\"}");
    controllerTester.jobCompletion(JobType.systemTest)
                    .application(application)
                    .projectId(projectId)
                    .submit();
    // Deploy, deactivate and report completion in staging.us-east-3.
    String stagingPath = String.format("/application/v4/tenant/%s/application/%s/instance/%s/environment/staging/region/us-east-3",
                                       application.tenant().value(), application.application().value(), application.instance().value());
    tester.assertResponse(request(stagingPath, POST)
                                  .data(deployData)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          new File("deploy-result.json"));
    tester.assertResponse(request(stagingPath, DELETE)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          "{\"message\":\"Deactivated " + application + " in staging.us-east-3\"}");
    controllerTester.jobCompletion(JobType.stagingTest)
                    .application(application)
                    .projectId(projectId)
                    .submit();
}
/**
 * Cluster info, utilization and application and deployment metrics are maintained async by maintainers.
 *
 * This sets these values as if the maintainers had run.
 */
private void setDeploymentMaintainedInfo(ContainerControllerTester controllerTester) {
    for (Application application : controllerTester.controller().applications().asList()) {
        controllerTester.controller().applications().lockApplicationOrThrow(application.id(), lockedApplication -> {
            // Reassigning the lambda parameter accumulates the updates before storing.
            lockedApplication = lockedApplication.with(new ApplicationMetrics(0.5, 0.7));
            for (Instance instance : application.instances().values()) {
                for (Deployment deployment : instance.deployments().values()) {
                    // Fixed cluster info/utilization/metrics values, asserted against by response files.
                    Map<ClusterSpec.Id, ClusterInfo> clusterInfo = new HashMap<>();
                    List<String> hostnames = new ArrayList<>();
                    hostnames.add("host1");
                    hostnames.add("host2");
                    clusterInfo.put(ClusterSpec.Id.from("cluster1"),
                                    new ClusterInfo("flavor1", 37, 2, 4, 50,
                                                    ClusterSpec.Type.content, hostnames));
                    Map<ClusterSpec.Id, ClusterUtilization> clusterUtils = new HashMap<>();
                    clusterUtils.put(ClusterSpec.Id.from("cluster1"), new ClusterUtilization(0.3, 0.6, 0.4, 0.3));
                    DeploymentMetrics metrics = new DeploymentMetrics(1, 2, 3, 4, 5,
                                                                      Optional.of(Instant.ofEpochMilli(123123)), Map.of());
                    lockedApplication = lockedApplication.with(instance.name(),
                                                               lockedInstance -> lockedInstance.withClusterInfo(deployment.zone(), clusterInfo)
                                                                                               .withClusterUtilization(deployment.zone(), clusterUtils)
                                                                                               .with(deployment.zone(), metrics)
                                                                                               .recordActivityAt(Instant.parse("2018-06-01T10:15:30.00Z"), deployment.zone()));
                }
                // NOTE(review): store is called once per instance, not once per application —
                // redundant but apparently harmless; confirm this placement is intentional.
                controllerTester.controller().applications().store(lockedApplication);
            }
        });
    }
}
/** Returns the service registry mock registered as a component in the test container. */
private ServiceRegistryMock serviceRegistry() {
    String componentId = ServiceRegistryMock.class.getName();
    return (ServiceRegistryMock) tester.container().components().getComponent(componentId);
}
/** Marks the given zone as IN for the named rotation, then runs the status updater once to pick it up. */
private void setZoneInRotation(String rotationName, ZoneId zone) {
    var inStatus = com.yahoo.vespa.hosted.controller.api.integration.routing.RotationStatus.IN;
    serviceRegistry().globalRoutingServiceMock().setStatus(rotationName, zone, inStatus);
    new RotationStatusUpdater(tester.controller(), Duration.ofDays(1), new JobControl(tester.controller().curator())).run();
}
/**
 * Builds the rotation status for the given instance from the mock global routing service.
 *
 * NOTE(review): every zone reported by the mock is mapped to RotationState.in unconditionally —
 * the per-zone status value from the mock is ignored. Presumably fine for these tests; confirm.
 */
private RotationStatus rotationStatus(Instance instance) {
    return controllerTester.controller().applications().rotationRepository().getRotation(instance)
                           .map(rotation -> {
                               var rotationStatus = controllerTester.controller().serviceRegistry().globalRoutingService().getHealthStatus(rotation.name());
                               var statusMap = new LinkedHashMap<ZoneId, RotationState>();
                               rotationStatus.forEach((zone, status) -> statusMap.put(zone, RotationState.in));
                               return RotationStatus.from(Map.of(rotation.id(), statusMap));
                           })
                           .orElse(RotationStatus.EMPTY);
}
/** Stores updated contact information for tenant2 directly through the tenant lock — there is no API for this. */
private void updateContactInformation() {
    Contact contact = new Contact(URI.create("www.contacts.tld/1234"),
                                  URI.create("www.properties.tld/1234"),
                                  URI.create("www.issues.tld/1234"),
                                  List.of(List.of("alice"), List.of("bob")), "queue", Optional.empty());
    tester.controller().tenants().lockIfPresent(TenantName.from("tenant2"),
                                                LockedTenant.Athenz.class,
                                                lockedTenant -> tester.controller().tenants().store(lockedTenant.with(contact)));
}
/**
 * Registers mock contact information for the given property id.
 *
 * NOTE(review): the first URI here uses "www.issues.tld" while updateContactInformation() passes
 * "www.contacts.tld" first — the argument order differs between the two call sites; verify against
 * the Contact constructor before relying on either.
 */
private void registerContact(long propertyId) {
    PropertyId p = new PropertyId(String.valueOf(propertyId));
    serviceRegistry().contactRetrieverMock().addContact(p, new Contact(URI.create("www.issues.tld/" + p.id()),
                                                                      URI.create("www.contacts.tld/" + p.id()),
                                                                      URI.create("www.properties.tld/" + p.id()),
                                                                      List.of(Collections.singletonList("alice"),
                                                                              Collections.singletonList("bob")),
                                                                      "queue", Optional.empty()));
}
private static class RequestBuilder implements Supplier<Request> {
private final String path;
private final Request.Method method;
private byte[] data = new byte[0];
private AthenzIdentity identity;
private OktaAccessToken oktaAccessToken;
private String contentType = "application/json";
private Map<String, List<String>> headers = new HashMap<>();
private String recursive;
private RequestBuilder(String path, Request.Method method) {
this.path = path;
this.method = method;
}
private RequestBuilder data(byte[] data) { this.data = data; return this; }
private RequestBuilder data(String data) { return data(data.getBytes(StandardCharsets.UTF_8)); }
private RequestBuilder data(MultiPartStreamer streamer) {
return Exceptions.uncheck(() -> data(streamer.data().readAllBytes()).contentType(streamer.contentType()));
}
private RequestBuilder userIdentity(UserId userId) { this.identity = HostedAthenzIdentities.from(userId); return this; }
private RequestBuilder screwdriverIdentity(ScrewdriverId screwdriverId) { this.identity = HostedAthenzIdentities.from(screwdriverId); return this; }
private RequestBuilder oktaAccessToken(OktaAccessToken oktaAccessToken) { this.oktaAccessToken = oktaAccessToken; return this; }
private RequestBuilder contentType(String contentType) { this.contentType = contentType; return this; }
private RequestBuilder recursive(String recursive) { this.recursive = recursive; return this; }
private RequestBuilder header(String name, String value) {
this.headers.putIfAbsent(name, new ArrayList<>());
this.headers.get(name).add(value);
return this;
}
@Override
public Request get() {
Request request = new Request("http:
(recursive == null ? "" : "?recursive=" + recursive),
data, method);
request.getHeaders().addAll(headers);
request.getHeaders().put("Content-Type", contentType);
if (identity != null) {
addIdentityToRequest(request, identity);
}
if (oktaAccessToken != null) {
addOktaAccessToken(request, oktaAccessToken);
}
return request;
}
}
} | class ApplicationApiTest extends ControllerContainerTest {
private static final String responseFiles = "src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/";
private static final String pemPublicKey = "-----BEGIN PUBLIC KEY-----\n" +
"MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuKVFA8dXk43kVfYKzkUqhEY2rDT9\n" +
"z/4jKSTHwbYR8wdsOSrJGVEUPbS2nguIJ64OJH7gFnxM6sxUVj+Nm2HlXw==\n" +
"-----END PUBLIC KEY-----\n";
private static final String quotedPemPublicKey = pemPublicKey.replaceAll("\\n", "\\\\n");
private static final ApplicationPackage applicationPackageDefault = new ApplicationPackageBuilder()
.instances("default")
.environment(Environment.prod)
.globalServiceId("foo")
.region("us-central-1")
.region("us-east-3")
.region("us-west-1")
.blockChange(false, true, "mon-fri", "0-8", "UTC")
.build();
private static final ApplicationPackage applicationPackageInstance1 = new ApplicationPackageBuilder()
.instances("instance1")
.environment(Environment.prod)
.globalServiceId("foo")
.region("us-central-1")
.region("us-east-3")
.region("us-west-1")
.blockChange(false, true, "mon-fri", "0-8", "UTC")
.build();
private static final AthenzDomain ATHENZ_TENANT_DOMAIN = new AthenzDomain("domain1");
private static final AthenzDomain ATHENZ_TENANT_DOMAIN_2 = new AthenzDomain("domain2");
private static final ScrewdriverId SCREWDRIVER_ID = new ScrewdriverId("12345");
private static final UserId USER_ID = new UserId("myuser");
private static final UserId HOSTED_VESPA_OPERATOR = new UserId("johnoperator");
private static final OktaAccessToken OKTA_AT = new OktaAccessToken("dummy");
private static final ZoneId TEST_ZONE = ZoneId.from(Environment.test, RegionName.from("us-east-1"));
private static final ZoneId STAGING_ZONE = ZoneId.from(Environment.staging, RegionName.from("us-east-3"));
private ContainerControllerTester controllerTester;
private ContainerTester tester;
@Before
public void before() {
controllerTester = new ContainerControllerTester(container, responseFiles);
tester = controllerTester.containerTester();
}
@Test
public void testApplicationApi() {
tester.computeVersionStatus();
tester.controller().jobController().setRunner(__ -> { });
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
tester.assertResponse(request("/application/v4/", GET).userIdentity(USER_ID),
new File("root.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
.userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT),
new File("tenant-without-applications.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1", PUT)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
new File("tenant-without-applications.json"));
tester.assertResponse(request("/application/v4/user", GET).userIdentity(USER_ID),
new File("user.json"));
tester.assertResponse(request("/application/v4/user", PUT).userIdentity(USER_ID),
"{\"message\":\"Created user 'by-myuser'\"}");
tester.assertResponse(request("/application/v4/user", GET).userIdentity(USER_ID),
new File("user-which-exists.json"));
tester.assertResponse(request("/application/v4/tenant/by-myuser", DELETE).userIdentity(USER_ID),
"{\"tenant\":\"by-myuser\",\"type\":\"USER\",\"applications\":[]}");
tester.assertResponse(request("/application/v4/tenant/", GET).userIdentity(USER_ID),
new File("tenant-list.json"));
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN_2, USER_ID);
registerContact(1234);
tester.assertResponse(request("/application/v4/tenant/tenant2", POST)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT)
.data("{\"athensDomain\":\"domain2\", \"property\":\"property2\", \"propertyId\":\"1234\"}"),
new File("tenant-without-applications-with-id.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2", PUT)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT)
.data("{\"athensDomain\":\"domain2\", \"property\":\"property2\", \"propertyId\":\"1234\"}"),
new File("tenant-without-applications-with-id.json"));
updateContactInformation();
tester.assertResponse(request("/application/v4/tenant/tenant2", GET).userIdentity(USER_ID),
new File("tenant-with-contact-info.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("application-reference.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1", GET).userIdentity(USER_ID),
new File("tenant-with-application.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/", GET).userIdentity(USER_ID),
new File("application-list.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/", GET).userIdentity(USER_ID),
new File("application-list.json"));
addUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));
MultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
.data(entity)
.header("X-Content-Hash", Base64.getEncoder().encodeToString(Signatures.sha256Digest(entity::data)))
.userIdentity(USER_ID),
new File("deploy-result.json"));
ApplicationId id = ApplicationId.from("tenant1", "application1", "instance1");
long screwdriverProjectId = 123;
addScrewdriverUserToDeployRole(SCREWDRIVER_ID,
ATHENZ_TENANT_DOMAIN,
new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId(id.application().value()));
controllerTester.jobCompletion(JobType.component)
.application(id)
.projectId(screwdriverProjectId)
.uploadArtifact(applicationPackageInstance1)
.submit();
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/instance1/", POST)
.data(createApplicationDeployData(Optional.empty(), false))
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/instance1", DELETE)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in test.us-east-1\"}");
controllerTester.jobCompletion(JobType.systemTest)
.application(id)
.projectId(screwdriverProjectId)
.submit();
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/staging/region/us-east-3/instance/instance1/", POST)
.data(createApplicationDeployData(Optional.empty(), false))
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/staging/region/us-east-3/instance/instance1", DELETE)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in staging.us-east-3\"}");
controllerTester.jobCompletion(JobType.stagingTest)
.application(id)
.projectId(screwdriverProjectId)
.submit();
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/", POST)
.data(createApplicationDeployData(Optional.empty(), false))
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
controllerTester.jobCompletion(JobType.productionUsCentral1)
.application(id)
.projectId(screwdriverProjectId)
.unsuccessful()
.submit();
entity = createApplicationDeployData(Optional.empty(),
Optional.of(ApplicationVersion.from(BuildJob.defaultSourceRevision,
BuildJob.defaultBuildNumber - 1)),
true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/", POST)
.data(entity)
.userIdentity(HOSTED_VESPA_OPERATOR),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"No application package found for tenant1.application1.instance1 with version 1.0.41-commit1\"}",
400);
entity = createApplicationDeployData(Optional.empty(),
Optional.of(ApplicationVersion.from(BuildJob.defaultSourceRevision,
BuildJob.defaultBuildNumber)),
true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/", POST)
.data(entity)
.userIdentity(HOSTED_VESPA_OPERATOR),
new File("deploy-result.json"));
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.instances("instance1")
.environment(Environment.prod)
.region("us-west-1")
.build();
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2/instance/default", POST)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("application-reference-2.json"));
ApplicationId app2 = ApplicationId.from("tenant2", "application2", "default");
long screwdriverProjectId2 = 456;
addScrewdriverUserToDeployRole(SCREWDRIVER_ID,
ATHENZ_TENANT_DOMAIN_2,
new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId(app2.application().value()));
controllerTester.controller().applications().deploymentTrigger().triggerChange(TenantAndApplicationId.from(app2), Change.of(Version.fromString("7.0")));
controllerTester.jobCompletion(JobType.component)
.application(app2)
.projectId(screwdriverProjectId2)
.uploadArtifact(applicationPackage)
.submit();
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", GET)
.userIdentity(USER_ID),
new File("application2.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", GET)
.screwdriverIdentity(SCREWDRIVER_ID),
new File("application2.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", PATCH)
.userIdentity(USER_ID)
.data("{\"majorVersion\":7}"),
"{\"message\":\"Set major version to 7\"}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2/key", POST)
.userIdentity(USER_ID)
.data("{\"key\":\"" + pemPublicKey + "\"}"),
"{\"keys\":[\"-----BEGIN PUBLIC KEY-----\\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuKVFA8dXk43kVfYKzkUqhEY2rDT9\\nz/4jKSTHwbYR8wdsOSrJGVEUPbS2nguIJ64OJH7gFnxM6sxUVj+Nm2HlXw==\\n-----END PUBLIC KEY-----\\n\"]}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2/instance/default", PATCH)
.userIdentity(USER_ID)
.data("{\"pemDeployKey\":\"" + pemPublicKey + "\"}"),
"{\"message\":\"Added deploy key " + quotedPemPublicKey + "\"}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", GET)
.userIdentity(USER_ID),
new File("application2-with-patches.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", PATCH)
.userIdentity(USER_ID)
.data("{\"majorVersion\":null}"),
"{\"message\":\"Set major version to empty\"}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2/key", DELETE)
.userIdentity(USER_ID)
.data("{\"key\":\"" + pemPublicKey + "\"}"),
"{\"keys\":[]}");
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", GET)
.userIdentity(USER_ID),
new File("application2.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2", DELETE)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
"{\"message\":\"Deleted application tenant2.application2\"}");
controllerTester.upgrader().overrideConfidence(Version.fromString("6.1"), VespaVersion.Confidence.broken);
tester.computeVersionStatus();
setDeploymentMaintainedInfo(controllerTester);
setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "us-central-1"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", GET)
.userIdentity(USER_ID),
new File("application.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1", GET)
.userIdentity(USER_ID),
new File("deployment.json"));
addIssues(controllerTester, TenantAndApplicationId.from("tenant1", "application1"));
tester.assertResponse(request("/application/v4/", GET)
.userIdentity(USER_ID)
.recursive("deployment"),
new File("recursive-root.json"));
tester.assertResponse(request("/application/v4/", GET)
.userIdentity(USER_ID)
.recursive("tenant"),
new File("recursive-until-tenant-root.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/", GET)
.userIdentity(USER_ID)
.recursive("true"),
new File("tenant1-recursive.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", GET)
.userIdentity(USER_ID)
.recursive("true"),
new File("application1-recursive.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/nodes", GET)
.userIdentity(USER_ID),
new File("application-nodes.json"));
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application1/environment/dev/region/us-central-1/instance/default/logs?from=1233&to=3214", GET)
.userIdentity(USER_ID),
"INFO - All good");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", DELETE)
.userIdentity(HOSTED_VESPA_OPERATOR),
"{\"message\":\"Changed deployment from 'application change to 1.0.42-commit1' to 'no change' for application 'tenant1.application1'\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", DELETE)
.userIdentity(USER_ID)
.data("{\"cancel\":\"all\"}"),
"{\"message\":\"No deployment in progress for application 'tenant1.application1' at this time\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", POST)
.userIdentity(USER_ID)
.data("6.1.0"),
"{\"message\":\"Triggered pin to 6.1 for tenant1.application1\"}");
assertTrue("Action is logged to audit log",
tester.controller().auditLogger().readLog().entries().stream()
.anyMatch(entry -> entry.resource().equals("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin")));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{\"platform\":\"6.1\",\"pinned\":true}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", GET)
.userIdentity(USER_ID), "{\"platform\":\"6.1\",\"pinned\":true}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Changed deployment from 'pin to 6.1' to 'upgrade to 6.1' for application 'tenant1.application1'\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{\"platform\":\"6.1\",\"pinned\":false}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", POST)
.userIdentity(USER_ID)
.data("6.1"),
"{\"message\":\"Triggered pin to 6.1 for tenant1.application1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{\"platform\":\"6.1\",\"pinned\":true}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/platform", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Changed deployment from 'pin to 6.1' to 'pin to current platform' for application 'tenant1.application1'\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{\"pinned\":true}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying/pin", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Changed deployment from 'pin to current platform' to 'no change' for application 'tenant1.application1'\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploying", GET)
.userIdentity(USER_ID), "{}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/production-us-west-1/pause", POST)
.userIdentity(USER_ID),
"{\"message\":\"production-us-west-1 for tenant1.application1.instance1 paused for " + DeploymentTrigger.maxPause + "\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/production-us-west-1", POST)
.userIdentity(USER_ID),
"{\"message\":\"Triggered production-us-west-1 for tenant1.application1.instance1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart", POST)
.userIdentity(USER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart", POST)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in prod.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/staging/region/us-central-1/instance/instance1/restart", POST)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in staging.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-central-1/instance/instance1/restart", POST)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in test.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-central-1/instance/instance1/restart", POST)
.userIdentity(USER_ID),
"{\"message\":\"Requested restart of tenant1.application1.instance1 in dev.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/restart?hostname=host1", POST)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"error-code\":\"INTERNAL_SERVER_ERROR\",\"message\":\"No node with the hostname host1 is known.\"}", 500);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/suspended", GET)
.userIdentity(USER_ID),
new File("suspended.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/service", GET)
.userIdentity(USER_ID),
new File("services.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/service/storagenode-awe3slno6mmq2fye191y324jl/state/v1/", GET)
.userIdentity(USER_ID),
new File("service.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", DELETE)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("delete-with-active-deployments.json"), 400);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/production-us-central-1/test-config", GET)
.userIdentity(USER_ID),
new File("test-config.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in dev.us-west-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1", DELETE)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in prod.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1", DELETE)
.screwdriverIdentity(SCREWDRIVER_ID),
"{\"message\":\"Deactivated tenant1.application1.instance1 in prod.us-central-1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/deploy/dev-us-east-1", POST)
.userIdentity(USER_ID)
.data(createApplicationDeployData(applicationPackage, false)),
new File("deployment-job-accepted.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(createApplicationSubmissionData(applicationPackage)),
"{\"message\":\"Application package version: 1.0.43-d00d, source revision of repository 'repo', branch 'master' with commit 'd00d', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\"}");
ApplicationPackage packageWithServiceForWrongDomain = new ApplicationPackageBuilder()
.instances("instance1")
.environment(Environment.prod)
.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(ATHENZ_TENANT_DOMAIN_2.getName()), AthenzService.from("service"))
.region("us-west-1")
.build();
configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN_2, "service"), true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(createApplicationSubmissionData(packageWithServiceForWrongDomain)),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Athenz domain in deployment.xml: [domain2] must match tenant domain: [domain1]\"}", 400);
ApplicationPackage packageWithService = new ApplicationPackageBuilder()
.instances("instance1")
.environment(Environment.prod)
.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from(ATHENZ_TENANT_DOMAIN.getName()), AthenzService.from("service"))
.region("us-west-1")
.build();
configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(createApplicationSubmissionData(packageWithService)),
"{\"message\":\"Application package version: 1.0.44-d00d, source revision of repository 'repo', branch 'master' with commit 'd00d', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.header("X-Content-Hash", "not/the/right/hash")
.data(createApplicationSubmissionData(packageWithService)),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Value of X-Content-Hash header does not match computed content hash\"}", 400);
MultiPartStreamer streamer = createApplicationSubmissionData(packageWithService);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.header("X-Content-Hash", Base64.getEncoder().encodeToString(Signatures.sha256Digest(streamer::data)))
.data(streamer),
"{\"message\":\"Application package version: 1.0.45-d00d, source revision of repository 'repo', branch 'master' with commit 'd00d', by a@b, built against 6.1 at 1970-01-01T00:00:01Z\"}");
ApplicationId app1 = ApplicationId.from("tenant1", "application1", "instance1");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/jobreport", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(asJson(DeploymentJobs.JobReport.ofComponent(app1,
1234,
123,
Optional.empty(),
BuildJob.defaultSourceRevision))),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"" + app1 + " is set up to be deployed from internally," +
" and no longer accepts submissions from Screwdriver v3 jobs. If you need to revert " +
"to the old pipeline, please file a ticket at yo/vespa-support and request this.\"}",
400);
assertEquals(2, tester.controller().applications().deploymentTrigger().triggerReadyJobs());
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job", GET)
.userIdentity(USER_ID),
new File("jobs.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/system-test", GET)
.userIdentity(USER_ID),
new File("system-test-job.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/system-test/run/1", GET)
.userIdentity(USER_ID),
new File("system-test-details.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/job/staging-test", DELETE)
.userIdentity(USER_ID),
"{\"message\":\"Aborting run 1 of staging-test for tenant1.application1.instance1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/submit", DELETE)
.userIdentity(HOSTED_VESPA_OPERATOR),
"{\"message\":\"Unregistered 'tenant1.application1.instance1' from internal deployment pipeline.\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/jobreport", POST)
.screwdriverIdentity(SCREWDRIVER_ID)
.data(asJson(DeploymentJobs.JobReport.ofComponent(app1,
1234,
123,
Optional.empty(),
BuildJob.defaultSourceRevision))),
"{\"message\":\"ok\"}");
byte[] data = new byte[0];
tester.assertResponse(request("/application/v4/user?user=new_user&domain=by", PUT)
.data(data)
.userIdentity(new UserId("new_user")),
new File("create-user-response.json"));
tester.assertResponse(request("/application/v4/user", GET)
.userIdentity(new UserId("other_user")),
"{\"user\":\"other_user\",\"tenants\":[],\"tenantExists\":false}");
tester.assertResponse(request("/application/v4/", Request.Method.OPTIONS)
.userIdentity(USER_ID),
"");
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", DELETE).userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
"{\"message\":\"Deleted instance tenant1.application1.instance1\"}");
tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE).userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("tenant-without-applications.json"));
}
// Attaches a deployment issue, an ownership issue, and an owner to the given
// application, storing the result under the application lock.
private void addIssues(ContainerControllerTester tester, TenantAndApplicationId id) {
    tester.controller().applications().lockApplicationOrThrow(id, application -> {
        tester.controller().applications().store(application.withDeploymentIssueId(IssueId.from("123"))
                                                            .withOwnershipIssueId(IssueId.from("321"))
                                                            .withOwner(User.from("owner-username")));
    });
}
@Test
public void testRotationOverride() {
// A computed version status is required before deployments are admitted.
tester.computeVersionStatus();
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
// A global service id gives the application a global rotation across the two regions.
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.instances("instance1")
.globalServiceId("foo")
.region("us-west-1")
.region("us-east-3")
.build();
ApplicationId id = createTenantAndApplication();
long projectId = 1;
MultiPartStreamer deployData = createApplicationDeployData(Optional.of(applicationPackage), false);
startAndTestChange(controllerTester, id, projectId, applicationPackage, deployData, 100);
// Deploy to us-west-1 and report the production job as complete.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/deploy", POST)
.data(deployData)
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
controllerTester.jobCompletion(JobType.productionUsWest1)
.application(id)
.projectId(projectId)
.submit();
setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "us-west-1"));
// Unknown application: global-rotation status is rejected with 400.
tester.assertResponse(request("/application/v4/tenant/tenant2/application/application2/environment/prod/region/us-west-1/instance/default/global-rotation", GET)
.userIdentity(USER_ID),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"tenant2.application2 not found\"}",
400);
// Known application but no deployment in us-east-3 yet: 404 for both status and override.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/global-rotation", GET)
.userIdentity(USER_ID),
"{\"error-code\":\"NOT_FOUND\",\"message\":\"application 'tenant1.application1.instance1' has no deployment in prod.us-east-3\"}",
404);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/global-rotation/override", PUT)
.userIdentity(USER_ID)
.data("{\"reason\":\"unit-test\"}"),
"{\"error-code\":\"NOT_FOUND\",\"message\":\"application 'tenant1.application1.instance1' has no deployment in prod.us-east-3\"}",
404);
setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "us-west-1"));
// Deployed zone: read rotation status, then GET/PUT/DELETE the override;
// each response is compared against its expected JSON file.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation", GET)
.userIdentity(USER_ID),
new File("global-rotation.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override", GET)
.userIdentity(USER_ID),
new File("global-rotation-get.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override", PUT)
.userIdentity(USER_ID)
.data("{\"reason\":\"unit-test\"}"),
new File("global-rotation-put.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/global-rotation/override", DELETE)
.userIdentity(USER_ID)
.data("{\"reason\":\"unit-test\"}"),
new File("global-rotation-delete.json"));
}
// Fix: the original declared @Test twice on this method; org.junit.Test is not a
// repeatable annotation, so the duplicate did not compile. One @Test suffices.
@Test
public void testDeployDirectly() {
// A computed version status is required before deployments are admitted.
tester.computeVersionStatus();
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
addUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));
// Create the tenant and the instance the deployment will target.
tester.assertResponse(request("/application/v4/tenant/tenant1", POST).userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT),
new File("tenant-without-applications.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("application-reference.json"));
addScrewdriverUserToDeployRole(SCREWDRIVER_ID,
ATHENZ_TENANT_DOMAIN,
new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId("application1"));
// Deploy straight to a prod zone with the screwdriver identity.
MultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-central-1/instance/instance1/deploy", POST)
.data(entity)
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
// System applications carry no application package; deploying one is refused
// while the system itself is upgrading.
MultiPartStreamer noAppEntity = createApplicationDeployData(Optional.empty(), true);
tester.assertResponse(request("/application/v4/tenant/hosted-vespa/application/routing/environment/prod/region/us-central-1/instance/default/deploy", POST)
.data(noAppEntity)
.userIdentity(HOSTED_VESPA_OPERATOR),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Deployment of system applications during a system upgrade is not allowed\"}",
400);
// Once the system is upgraded to the controller version, the same deploy succeeds.
tester.upgradeSystem(tester.controller().versionStatus().controllerVersion().get().versionNumber());
tester.assertResponse(request("/application/v4/tenant/hosted-vespa/application/routing/environment/prod/region/us-central-1/instance/default/deploy", POST)
.data(noAppEntity)
.userIdentity(HOSTED_VESPA_OPERATOR),
new File("deploy-result.json"));
// Deploying proxy-host to this zone is rejected (expected body in deploy-no-deployment.json).
tester.assertResponse(request("/application/v4/tenant/hosted-vespa/application/proxy-host/environment/prod/region/us-central-1/instance/instance1/deploy", POST)
.data(noAppEntity)
.userIdentity(HOSTED_VESPA_OPERATOR),
new File("deploy-no-deployment.json"), 400);
}
@Test
public void testSortsDeploymentsAndJobs() {
tester.computeVersionStatus();
// Start with a single-region package and complete a deployment to us-east-3.
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.instances("instance1")
.region("us-east-3")
.build();
ApplicationId id = createTenantAndApplication();
long projectId = 1;
MultiPartStreamer deployData = createApplicationDeployData(Optional.empty(), false);
startAndTestChange(controllerTester, id, projectId, applicationPackage, deployData, 100);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/deploy", POST)
.data(deployData)
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
controllerTester.jobCompletion(JobType.productionUsEast3)
.application(id)
.projectId(projectId)
.submit();
// Switch to a package that adds us-west-1 (listed first) and a global service id,
// then deploy to us-west-1 so the deployments are completed out of the serialized order.
applicationPackage = new ApplicationPackageBuilder()
.instances("instance1")
.globalServiceId("foo")
.region("us-west-1")
.region("us-east-3")
.build();
startAndTestChange(controllerTester, id, projectId, applicationPackage, deployData, 101);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-west-1/deploy", POST)
.data(deployData)
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
controllerTester.jobCompletion(JobType.productionUsWest1)
.application(id)
.projectId(projectId)
.submit();
setZoneInRotation("rotation-fqdn-1", ZoneId.from("prod", "us-west-1"));
// Redeploy us-east-3 on the new package and complete that job as well.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1/environment/prod/region/us-east-3/deploy", POST)
.data(deployData)
.screwdriverIdentity(SCREWDRIVER_ID),
new File("deploy-result.json"));
controllerTester.jobCompletion(JobType.productionUsEast3)
.application(id)
.projectId(projectId)
.submit();
setDeploymentMaintainedInfo(controllerTester);
// The final instance response must match the expected file, which pins the
// ordering of deployments and jobs in the serialized output.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", GET)
.userIdentity(USER_ID),
new File("application-without-change-multiple-deployments.json"));
}
@Test
public void testMeteringResponses() {
// Seed the mock metering service with fixed allocations and a three-point
// snapshot history, then verify the metering endpoint's serialized response.
MockMeteringClient metering = (MockMeteringClient) controllerTester.containerTester().serviceRegistry().meteringService();
ApplicationId application = ApplicationId.from("doesnotexist", "doesnotexist", "default");
List<ResourceSnapshot> snapshots = List.of(
        new ResourceSnapshot(application, 1, 2, 3, Instant.ofEpochMilli(123)),
        new ResourceSnapshot(application, 1, 2, 3, Instant.ofEpochMilli(246)),
        new ResourceSnapshot(application, 1, 2, 3, Instant.ofEpochMilli(492)));
ResourceAllocation monthToDate = new ResourceAllocation(12, 24, 1000);
ResourceAllocation previousMonth = new ResourceAllocation(24, 48, 2000);
ResourceAllocation current = new ResourceAllocation(1, 2, 3);
metering.setMeteringInfo(new MeteringInfo(monthToDate, previousMonth, current, Map.of(application, snapshots)));
tester.assertResponse(request("/application/v4/tenant/doesnotexist/application/doesnotexist/metering", GET)
        .userIdentity(USER_ID)
        .oktaAccessToken(OKTA_AT),
        new File("application1-metering.json"));
}
@Test
public void testErrorResponses() throws Exception {
tester.computeVersionStatus();
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
// PUT on a tenant that does not exist yet is denied.
tester.assertResponse(request("/application/v4/tenant/tenant1", PUT)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
"{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
403);
// GETs on nonexistent tenant/application/deployment return 404.
tester.assertResponse(request("/application/v4/tenant/tenant1", GET)
.userIdentity(USER_ID),
"{\"error-code\":\"NOT_FOUND\",\"message\":\"Tenant 'tenant1' does not exist\"}",
404);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", GET)
.userIdentity(USER_ID),
"{\"error-code\":\"NOT_FOUND\",\"message\":\"tenant1.application1 not found\"}",
404);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-east/instance/default", GET)
.userIdentity(USER_ID),
"{\"error-code\":\"NOT_FOUND\",\"message\":\"tenant1.application1 not found\"}",
404);
// Create tenant1 successfully, then exercise the tenant-creation error paths.
tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
.userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT),
new File("tenant-without-applications.json"));
// Same Athenz domain cannot back a second tenant.
tester.assertResponse(request("/application/v4/tenant/tenant2", POST)
.userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Could not create tenant 'tenant2': The Athens domain 'domain1' is already connected to tenant 'tenant1'\"}",
400);
// Re-creating an existing tenant fails.
tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Tenant 'tenant1' already exists\"}",
400);
// Underscores are not legal in new tenant names.
tester.assertResponse(request("/application/v4/tenant/my_tenant_2", POST)
.userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"New tenant or application names must start with a letter, may contain no more than 20 characters, and may only contain lowercase letters, digits or dashes, but no double-dashes.\"}",
400);
// The 'by-' prefix is reserved for user tenants.
tester.assertResponse(request("/application/v4/tenant/by-tenant2", POST)
.userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Athenz tenant name cannot have prefix 'by-'\"}",
400);
// 'hosted-vespa' is a reserved tenant name.
tester.assertResponse(request("/application/v4/tenant/hosted-vespa", POST)
.userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Tenant 'hosted-vespa' already exists\"}",
400);
// Create an instance, then verify a duplicate instance is rejected.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("application-reference.json"));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
.oktaAccessToken(OKTA_AT)
.userIdentity(USER_ID),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Could not create 'tenant1.application1.instance1': Instance already exists\"}",
400);
// Config server exceptions are mapped to appropriate HTTP responses; each
// throwOnNextPrepare applies to exactly the following deploy request.
ConfigServerMock configServer = serviceRegistry().configServerMock();
configServer.throwOnNextPrepare(new ConfigServerException(new URI("server-url"), "Failed to prepare application", ConfigServerException.ErrorCode.INVALID_APPLICATION_PACKAGE, null));
MultiPartStreamer entity = createApplicationDeployData(applicationPackageInstance1, true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
.data(entity)
.userIdentity(USER_ID),
new File("deploy-failure.json"), 400);
configServer.throwOnNextPrepare(new ConfigServerException(new URI("server-url"), "Failed to prepare application", ConfigServerException.ErrorCode.OUT_OF_CAPACITY, null));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
.data(entity)
.userIdentity(USER_ID),
new File("deploy-out-of-capacity.json"), 400);
configServer.throwOnNextPrepare(new ConfigServerException(new URI("server-url"), "Failed to activate application", ConfigServerException.ErrorCode.ACTIVATION_CONFLICT, null));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
.data(entity)
.userIdentity(USER_ID),
new File("deploy-activation-conflict.json"), 409);
configServer.throwOnNextPrepare(new ConfigServerException(new URI("server-url"), "Internal server error", ConfigServerException.ErrorCode.INTERNAL_SERVER_ERROR, null));
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/dev/region/us-west-1/instance/instance1/deploy", POST)
.data(entity)
.userIdentity(USER_ID),
new File("deploy-internal-server-error.json"), 500);
// A tenant with active applications cannot be deleted; delete the instance first.
tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Could not delete tenant 'tenant1': This tenant has active applications\"}",
400);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", DELETE)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
"{\"message\":\"Deleted instance tenant1.application1.instance1\"}");
// Deleting the same instance twice yields 404.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", DELETE)
.oktaAccessToken(OKTA_AT)
.userIdentity(USER_ID),
"{\"error-code\":\"NOT_FOUND\",\"message\":\"Could not delete instance 'tenant1.application1.instance1': Instance not found\"}",
404);
tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE)
.userIdentity(USER_ID)
.oktaAccessToken(OKTA_AT),
new File("tenant-without-applications.json"));
// Deleting without an Okta access token is denied.
tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE)
.userIdentity(USER_ID),
"{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
403);
// A legacy tenant with an underscore in its name is written straight to curator;
// creating the dashed variant of the same name must then collide.
tester.controller().curator().writeTenant(new AthenzTenant(TenantName.from("my_tenant"), ATHENZ_TENANT_DOMAIN,
new Property("property1"), Optional.empty(), Optional.empty()));
tester.assertResponse(request("/application/v4/tenant/my-tenant", POST)
.userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Tenant 'my-tenant' already exists\"}",
400);
}
@Test
public void testAuthorization() {
UserId authorizedUser = USER_ID;
UserId unauthorizedUser = new UserId("othertenant");
// No identity at all: 401 Not authenticated.
tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
"{\n  \"message\" : \"Not authenticated\"\n}",
401);
// Listing tenants requires authentication only; the list is still empty.
tester.assertResponse(request("/application/v4/tenant/", GET)
.userIdentity(USER_ID)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}"),
"[]",
200);
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
// Only an Athenz domain admin may create a tenant backed by that domain.
tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.oktaAccessToken(OKTA_AT)
.userIdentity(unauthorizedUser),
"{\"error-code\":\"FORBIDDEN\",\"message\":\"The user 'user.othertenant' is not admin in Athenz domain 'domain1'\"}",
403);
tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.userIdentity(authorizedUser)
.oktaAccessToken(OKTA_AT),
new File("tenant-without-applications.json"),
200);
// Instance creation: denied for the unauthorized user, allowed for the admin.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
.userIdentity(unauthorizedUser)
.oktaAccessToken(OKTA_AT),
"{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
403);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
.userIdentity(authorizedUser)
.oktaAccessToken(OKTA_AT),
new File("application-reference.json"),
200);
// Deploying to prod with a plain user identity is denied.
MultiPartStreamer entity = createApplicationDeployData(applicationPackageDefault, true);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-west-1/instance/default/deploy", POST)
.data(entity)
.userIdentity(USER_ID),
"{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
403);
// Deleting the application is denied for the unauthorized user.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", DELETE)
.userIdentity(unauthorizedUser),
"{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
403);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/default", POST)
.userIdentity(authorizedUser)
.oktaAccessToken(OKTA_AT),
new File("application-reference-default.json"),
200);
// With two instances present the application itself cannot be deleted directly.
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", DELETE)
.userIdentity(authorizedUser)
.oktaAccessToken(OKTA_AT),
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Could not delete application; more than one instance present: [tenant1.application1, tenant1.application1.instance1]\"}",
400);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/default", DELETE)
.userIdentity(authorizedUser)
.oktaAccessToken(OKTA_AT),
"{\"message\":\"Deleted instance tenant1.application1.default\"}",
200);
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", DELETE)
.userIdentity(authorizedUser)
.oktaAccessToken(OKTA_AT),
"{\"message\":\"Deleted application tenant1.application1\"}",
200);
// Updating the tenant is denied for the unauthorized user ...
tester.assertResponse(request("/application/v4/tenant/tenant1", PUT)
.data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
.userIdentity(unauthorizedUser),
"{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
403);
// ... but the admin may move the tenant to another domain they administer.
createAthenzDomainWithAdmin(new AthenzDomain("domain2"), USER_ID);
tester.assertResponse(request("/application/v4/tenant/tenant1", PUT)
.data("{\"athensDomain\":\"domain2\", \"property\":\"property1\"}")
.userIdentity(authorizedUser)
.oktaAccessToken(OKTA_AT),
"{\"tenant\":\"tenant1\",\"type\":\"ATHENS\",\"athensDomain\":\"domain2\",\"property\":\"property1\",\"applications\":[]}",
200);
// Tenant deletion is likewise denied for the unauthorized user.
tester.assertResponse(request("/application/v4/tenant/tenant1", DELETE)
.userIdentity(unauthorizedUser),
"{\n  \"code\" : 403,\n  \"message\" : \"Access denied\"\n}",
403);
}
@Test
public void deployment_fails_on_illegal_domain_in_deployment_spec() {
// The athenz-identity domain in deployment.xml ('another.domain') differs from the
// tenant's domain ('domain1'), so the deploy request must be rejected with 400.
ApplicationPackage pkg = new ApplicationPackageBuilder()
        .upgradePolicy("default")
        .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("another.domain"), com.yahoo.config.provision.AthenzService.from("service"))
        .environment(Environment.prod)
        .region("us-west-1")
        .build();
long projectId = 123;
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(new AthenzDomain("another.domain"), "service"), true);
Application application = controllerTester.createApplication(ATHENZ_TENANT_DOMAIN.getName(), "tenant1", "application1", "default");
ScrewdriverId deployer = new ScrewdriverId(Long.toString(projectId));
controllerTester.authorize(ATHENZ_TENANT_DOMAIN, deployer, ApplicationAction.deploy, application.id());
controllerTester.jobCompletion(JobType.component)
        .application(application)
        .projectId(projectId)
        .uploadArtifact(pkg)
        .submit();
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
        .data(createApplicationDeployData(pkg, false))
        .screwdriverIdentity(deployer),
        "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Athenz domain in deployment.xml: [another.domain] must match tenant domain: [domain1]\"}",
        400);
}
@Test
public void deployment_succeeds_when_correct_domain_is_used() {
// deployment.xml names the tenant's own Athenz domain ('domain1'), so the
// deploy request is accepted.
ApplicationPackage pkg = new ApplicationPackageBuilder()
        .upgradePolicy("default")
        .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
        .environment(Environment.prod)
        .region("us-west-1")
        .build();
long projectId = 123;
ScrewdriverId deployer = new ScrewdriverId(Long.toString(projectId));
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
Application application = controllerTester.createApplication(ATHENZ_TENANT_DOMAIN.getName(), "tenant1", "application1", "default");
controllerTester.authorize(ATHENZ_TENANT_DOMAIN, deployer, ApplicationAction.deploy, application.id());
controllerTester.jobCompletion(JobType.component)
        .application(application)
        .projectId(projectId)
        .uploadArtifact(pkg)
        .submit();
tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
        .data(createApplicationDeployData(pkg, false))
        .screwdriverIdentity(deployer),
        new File("deploy-result.json"));
}
@Test
public void deployment_fails_for_personal_tenants_when_athenzdomain_specified_and_user_not_admin() {
tester.computeVersionStatus();
// 'tenant-admin' administers domain1; 'new-user' does not.
UserId tenantAdmin = new UserId("tenant-admin");
UserId userId = new UserId("new-user");
createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, tenantAdmin);
configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
// Create the personal (by-) tenant for the non-admin user.
byte[] data = new byte[0];
tester.assertResponse(request("/application/v4/user?user=new_user&domain=by", PUT)
.data(data)
.userIdentity(userId),
new File("create-user-response.json"));
// The package requests an Athenz identity in domain1, which the user does not administer.
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.upgradePolicy("default")
.athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
.environment(Environment.dev)
.region("us-west-1")
.build();
// Deploying to the personal tenant must then be rejected with 400.
String expectedResult="{\"error-code\":\"BAD_REQUEST\",\"message\":\"User user.new-user is not allowed to launch services in Athenz domain domain1. Please reach out to the domain admin.\"}";
MultiPartStreamer entity = createApplicationDeployData(applicationPackage, true);
tester.assertResponse(request("/application/v4/tenant/by-new-user/application/application1/environment/dev/region/us-west-1/instance/default", POST)
.data(entity)
.userIdentity(userId),
expectedResult,
400);
}
/**
 * Personal ("by-*") tenants: deploying a package that declares an Athenz identity succeeds
 * when the requesting user is an admin of that domain (here the same user owns the tenant).
 */
@Test
public void deployment_succeeds_for_personal_tenants_when_user_is_tenant_admin() {
    tester.computeVersionStatus();
    UserId tenantAdmin = new UserId("new_user");
    // The deploying user is the domain admin, so the identity check passes.
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, tenantAdmin);
    configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
    // Create the personal tenant "by-new-user" owned by the same user.
    byte[] data = new byte[0];
    tester.assertResponse(request("/application/v4/user?user=new_user&domain=by", PUT)
                                  .data(data)
                                  .userIdentity(tenantAdmin),
                          new File("create-user-response.json"));
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .upgradePolicy("default")
            .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
            .environment(Environment.dev)
            .region("us-west-1")
            .build();
    MultiPartStreamer entity = createApplicationDeployData(applicationPackage, true);
    tester.assertResponse(request("/application/v4/tenant/by-new-user/application/application1/environment/dev/region/us-west-1/instance/default", POST)
                                  .data(entity)
                                  .userIdentity(tenantAdmin),
                          new File("deploy-result.json"));
}
/**
 * Deployment is rejected with 400 when the config server is not permitted to launch the
 * Athenz service declared in the application package (allowLaunch=false in the mock).
 */
@Test
public void deployment_fails_when_athenz_service_cannot_be_launched() {
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .upgradePolicy("default")
            .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
            .environment(Environment.prod)
            .region("us-west-1")
            .build();
    long screwdriverProjectId = 123;
    ScrewdriverId screwdriverId = new ScrewdriverId(Long.toString(screwdriverProjectId));
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    // allowLaunch=false: launching domain1.service is forbidden, which should fail the deploy.
    configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), false);
    Application application = controllerTester.createApplication(ATHENZ_TENANT_DOMAIN.getName(), "tenant1", "application1", "default");
    controllerTester.authorize(ATHENZ_TENANT_DOMAIN, screwdriverId, ApplicationAction.deploy, application.id());
    controllerTester.jobCompletion(JobType.component)
            .application(application)
            .projectId(screwdriverProjectId)
            .uploadArtifact(applicationPackage)
            .submit();
    String expectedResult="{\"error-code\":\"BAD_REQUEST\",\"message\":\"Not allowed to launch Athenz service domain1.service\"}";
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
                                  .data(createApplicationDeployData(applicationPackage, false))
                                  .screwdriverIdentity(screwdriverId),
                          expectedResult,
                          400);
}
/**
 * A hosted-operator can redeploy an existing deployment without supplying an application
 * package or versions; the previously deployed artifact is reused and the second deploy
 * returns the same success payload.
 */
@Test
public void redeployment_succeeds_when_not_specifying_versions_or_application_package() {
    addUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));
    tester.computeVersionStatus();
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .upgradePolicy("default")
            .athenzIdentity(com.yahoo.config.provision.AthenzDomain.from("domain1"), com.yahoo.config.provision.AthenzService.from("service"))
            .environment(Environment.prod)
            .region("us-west-1")
            .build();
    long screwdriverProjectId = 123;
    ScrewdriverId screwdriverId = new ScrewdriverId(Long.toString(screwdriverProjectId));
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    configureAthenzIdentity(new com.yahoo.vespa.athenz.api.AthenzService(ATHENZ_TENANT_DOMAIN, "service"), true);
    Application application = controllerTester.createApplication(ATHENZ_TENANT_DOMAIN.getName(), "tenant1", "application1", "default");
    controllerTester.authorize(ATHENZ_TENANT_DOMAIN, screwdriverId, ApplicationAction.deploy, application.id());
    controllerTester.jobCompletion(JobType.component)
            .application(application)
            .projectId(screwdriverProjectId)
            .uploadArtifact(applicationPackage)
            .submit();
    // First deploy carries the full package.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
                                  .data(createApplicationDeployData(applicationPackage, false))
                                  .screwdriverIdentity(screwdriverId),
                          new File("deploy-result.json"));
    // Second deploy sends no package (Optional.empty) and deploys directly as the operator.
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/test/region/us-east-1/instance/default/", POST)
                                  .data(createApplicationDeployData(Optional.empty(), true))
                                  .userIdentity(HOSTED_VESPA_OPERATOR),
                          new File("deploy-result.json"));
}
/**
 * Exercises the /jobreport endpoint: re-posting a report for a job that is no longer
 * triggered, or for a job that was never triggered, returns 400 with a descriptive
 * message, while the component completion itself is recorded as a successful job status.
 */
@Test
public void testJobStatusReporting() {
    addUserToHostedOperatorRole(HostedAthenzIdentities.from(HOSTED_VESPA_OPERATOR));
    tester.computeVersionStatus();
    long projectId = 1;
    Application app = controllerTester.createApplication();
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .environment(Environment.prod)
            .region("us-central-1")
            .build();
    // Expected platform version of the completed component job (the tester's version).
    Version vespaVersion = new Version("6.1");
    BuildJob job = new BuildJob(report -> notifyCompletion(report, controllerTester), controllerTester.containerTester().serviceRegistry().artifactRepositoryMock())
            .application(app)
            .projectId(projectId);
    job.type(JobType.component).uploadArtifact(applicationPackage).submit();
    controllerTester.deploy(app.id().defaultInstance(), applicationPackage, TEST_ZONE);
    job.type(JobType.systemTest).submit();
    // Re-posting the already-submitted system-test report: the job is no longer triggered,
    // so the endpoint rejects it and echoes the last trigger time.
    Request request = request("/application/v4/tenant/tenant1/application/application1/jobreport", POST)
            .data(asJson(job.type(JobType.systemTest).report()))
            .userIdentity(HOSTED_VESPA_OPERATOR)
            .get();
    tester.assertResponse(request, "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Notified of completion " +
            "of system-test for tenant1.application1, but that has not been triggered; last was " +
            controllerTester.controller().applications().requireInstance(app.id().defaultInstance()).deploymentJobs().jobStatus().get(JobType.systemTest).lastTriggered().get().at() + "\"}", 400);
    // A report for a job that was never triggered at all is also rejected ("last was never").
    request = request("/application/v4/tenant/tenant1/application/application1/jobreport", POST)
            .data(asJson(job.type(JobType.productionUsEast3).report()))
            .userIdentity(HOSTED_VESPA_OPERATOR)
            .get();
    tester.assertResponse(request, "{\"error-code\":\"BAD_REQUEST\",\"message\":\"Notified of completion " +
            "of production-us-east-3 for tenant1.application1, but that has not been triggered; last was never\"}",
            400);
    // The component completion was recorded as a success at the expected platform version.
    JobStatus recordedStatus =
            tester.controller().applications().getInstance(app.id().defaultInstance()).get().deploymentJobs().jobStatus().get(JobType.component);
    assertNotNull("Status was recorded", recordedStatus);
    assertTrue(recordedStatus.isSuccess());
    assertEquals(vespaVersion, recordedStatus.lastCompleted().get().platform());
    recordedStatus =
            tester.controller().applications().getInstance(app.id().defaultInstance()).get().deploymentJobs().jobStatus().get(JobType.productionApNortheast2);
    assertNull("Status of never-triggered jobs is empty", recordedStatus);
    assertTrue("All jobs have been run", tester.controller().applications().deploymentTrigger().jobsToRun().isEmpty());
}
/**
 * A staging-test report carrying an outOfCapacity error is recorded as a failed job
 * status with that specific error attached.
 */
@Test
public void testJobStatusReportingOutOfCapacity() {
    controllerTester.containerTester().computeVersionStatus();
    long projectId = 1;
    Application app = controllerTester.createApplication();
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .environment(Environment.prod)
            .region("us-central-1")
            .build();
    BuildJob job = new BuildJob(report -> notifyCompletion(report, controllerTester), controllerTester.containerTester().serviceRegistry().artifactRepositoryMock())
            .application(app)
            .projectId(projectId);
    job.type(JobType.component).uploadArtifact(applicationPackage).submit();
    controllerTester.deploy(app.id().defaultInstance(), applicationPackage, TEST_ZONE);
    job.type(JobType.systemTest).submit();
    controllerTester.deploy(app.id().defaultInstance(), applicationPackage, STAGING_ZONE);
    // Report staging-test as failed due to lack of capacity.
    job.type(JobType.stagingTest).error(DeploymentJobs.JobError.outOfCapacity).submit();
    JobStatus jobStatus = tester.controller().applications().getInstance(app.id().defaultInstance()).get()
            .deploymentJobs()
            .jobStatus()
            .get(JobType.stagingTest);
    assertFalse(jobStatus.isSuccess());
    assertEquals(DeploymentJobs.JobError.outOfCapacity, jobStatus.jobError().get());
}
/**
 * Writes a routing policy directly to the curator store and verifies that both the
 * application view and the deployment view expose it in their JSON responses.
 */
@Test
public void applicationWithRoutingPolicy() {
    Application app = controllerTester.createApplication();
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .environment(Environment.prod)
            .region("us-west-1")
            .build();
    controllerTester.deployCompletely(app, applicationPackage, 1, false);
    RoutingPolicy policy = new RoutingPolicy(app.id().defaultInstance(),
                                             ClusterSpec.Id.from("default"),
                                             ZoneId.from(Environment.prod, RegionName.from("us-west-1")),
                                             HostName.from("lb-0-canonical-name"),
                                             Optional.of("dns-zone-1"), Set.of(EndpointId.of("c0")));
    // Bypass the maintainer and store the policy directly.
    tester.controller().curator().writeRoutingPolicies(app.id().defaultInstance(), Set.of(policy));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1", GET)
                                  .userIdentity(USER_ID),
                          new File("application-with-routing-policy.json"));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/environment/prod/region/us-west-1/instance/default", GET)
                                  .userIdentity(USER_ID),
                          new File("deployment-with-routing-policy.json"));
}
/**
 * Posts a job report to the /jobreport endpoint as the hosted operator, expects a 200 "ok"
 * response, then triggers any jobs that became ready as a result.
 */
private void notifyCompletion(DeploymentJobs.JobReport report, ContainerControllerTester tester) {
    assertResponse(request("/application/v4/tenant/tenant1/application/application1/jobreport", POST)
                           .userIdentity(HOSTED_VESPA_OPERATOR)
                           .data(asJson(report))
                           .get(),
                   200, "{\"message\":\"ok\"}");
    tester.controller().applications().deploymentTrigger().triggerReadyJobs();
}
/**
 * Serializes a job report to the JSON wire format expected by the jobreport API:
 * projectId, jobName, buildNumber, an optional jobError, an optional sourceRevision
 * (repository/branch/commit), and the tenant/application/instance triplet.
 *
 * @throws UncheckedIOException if Slime-to-JSON encoding fails
 */
private static byte[] asJson(DeploymentJobs.JobReport report) {
    Slime payload = new Slime();
    Cursor root = payload.setObject();
    root.setLong("projectId", report.projectId());
    root.setString("jobName", report.jobType().jobName());
    root.setLong("buildNumber", report.buildNumber());
    // Only present when the job failed.
    report.jobError().ifPresent(error -> root.setString("jobError", error.name()));
    // Only present when the report carries a version with a source revision.
    report.version()
          .flatMap(ApplicationVersion::source)
          .ifPresent(revision -> {
              Cursor source = root.setObject("sourceRevision");
              source.setString("repository", revision.repository());
              source.setString("branch", revision.branch());
              source.setString("commit", revision.commit());
          });
    root.setString("tenant", report.applicationId().tenant().value());
    root.setString("application", report.applicationId().application().value());
    root.setString("instance", report.applicationId().instance().value());
    try {
        return SlimeUtils.toJsonBytes(payload);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
/** Builds multipart deploy data from a concrete application package. */
private MultiPartStreamer createApplicationDeployData(ApplicationPackage applicationPackage, boolean deployDirectly) {
    return createApplicationDeployData(Optional.of(applicationPackage), deployDirectly);
}

/** Builds multipart deploy data without an explicit application version. */
private MultiPartStreamer createApplicationDeployData(Optional<ApplicationPackage> applicationPackage, boolean deployDirectly) {
    return createApplicationDeployData(applicationPackage, Optional.empty(), deployDirectly);
}

/**
 * Builds the multipart body for a deploy request: a "deployOptions" JSON part plus,
 * when a package is given, its zipped content as the "applicationZip" part.
 */
private MultiPartStreamer createApplicationDeployData(Optional<ApplicationPackage> applicationPackage,
                                                      Optional<ApplicationVersion> applicationVersion, boolean deployDirectly) {
    MultiPartStreamer streamer = new MultiPartStreamer();
    streamer.addJson("deployOptions", deployOptions(deployDirectly, applicationVersion));
    applicationPackage.ifPresent(ap -> streamer.addBytes("applicationZip", ap.zippedContent()));
    return streamer;
}

/**
 * Builds the multipart body for an application submission: submit options (fixed repo/branch/
 * commit/author metadata) plus the application zip and a dummy test zip.
 */
private MultiPartStreamer createApplicationSubmissionData(ApplicationPackage applicationPackage) {
    return new MultiPartStreamer().addJson(EnvironmentResource.SUBMIT_OPTIONS, "{\"repository\":\"repo\",\"branch\":\"master\",\"commit\":\"d00d\",\"authorEmail\":\"a@b\"}")
                                  .addBytes(EnvironmentResource.APPLICATION_ZIP, applicationPackage.zippedContent())
                                  .addBytes(EnvironmentResource.APPLICATION_TEST_ZIP, "content".getBytes());
}
/**
 * Builds the "deployOptions" JSON part of a deploy request. vespaVersion is always null and
 * validation errors are never ignored; buildNumber and sourceRevision are included only when
 * an application version is supplied (its buildNumber and source are then expected present).
 */
private String deployOptions(boolean deployDirectly, Optional<ApplicationVersion> applicationVersion) {
    StringBuilder json = new StringBuilder()
            .append("{\"vespaVersion\":null,")
            .append("\"ignoreValidationErrors\":false,")
            .append("\"deployDirectly\":").append(deployDirectly);
    applicationVersion.ifPresent(version -> json
            .append(",")
            .append("\"buildNumber\":").append(version.buildNumber().getAsLong()).append(",")
            .append("\"sourceRevision\":{")
            .append("\"repository\":\"").append(version.source().get().repository()).append("\",")
            .append("\"branch\":\"").append(version.source().get().branch()).append("\",")
            .append("\"commit\":\"").append(version.source().get().commit()).append("\"")
            .append("}"));
    return json.append("}").toString();
}
/** Creates a request builder for the given path and HTTP method. */
private RequestBuilder request(String path, Request.Method method) {
    return new RequestBuilder(path, method);
}

/**
 * In production this happens outside hosted Vespa, so there is no API for it and we need to
 * reach down into the mock setup to replicate the action: create the Athenz domain, mark it
 * as a Vespa tenant domain and register the given user as its admin.
 */
private void createAthenzDomainWithAdmin(AthenzDomain domain, UserId userId) {
    AthenzClientFactoryMock mock = (AthenzClientFactoryMock) container.components()
            .getComponent(AthenzClientFactoryMock.class.getName());
    AthenzDbMock.Domain domainMock = mock.getSetup().getOrCreateDomain(domain);
    domainMock.markAsVespaTenant();
    domainMock.admin(AthenzUser.fromUserId(userId.id()));
}

/**
 * Mock athenz service identity configuration. Simulates whether the config server is allowed
 * to launch the given service (allowLaunch).
 */
private void configureAthenzIdentity(com.yahoo.vespa.athenz.api.AthenzService service, boolean allowLaunch) {
    AthenzClientFactoryMock mock = (AthenzClientFactoryMock) container.components()
            .getComponent(AthenzClientFactoryMock.class.getName());
    AthenzDbMock.Domain domainMock = mock.getSetup().domains.computeIfAbsent(service.getDomain(), AthenzDbMock.Domain::new);
    domainMock.services.put(service.getName(), new AthenzDbMock.Service(allowLaunch));
}

/**
 * In production this happens outside hosted Vespa, so there is no API for it and we need to
 * reach down into the mock setup to replicate the action: grant the screwdriver identity the
 * deploy role on the given application.
 */
private void addScrewdriverUserToDeployRole(ScrewdriverId screwdriverId,
                                            AthenzDomain domain,
                                            com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId applicationId) {
    AthenzClientFactoryMock mock = (AthenzClientFactoryMock) container.components()
            .getComponent(AthenzClientFactoryMock.class.getName());
    AthenzIdentity screwdriverIdentity = HostedAthenzIdentities.from(screwdriverId);
    AthenzDbMock.Application athenzApplication = mock.getSetup().domains.get(domain).applications.get(applicationId);
    athenzApplication.addRoleMember(ApplicationAction.deploy, screwdriverIdentity);
}
/**
 * Creates tenant1 (bound to the Athenz tenant domain) with application1/instance1 via the
 * API, grants the screwdriver identity the deploy role, and returns the application id.
 */
private ApplicationId createTenantAndApplication() {
    createAthenzDomainWithAdmin(ATHENZ_TENANT_DOMAIN, USER_ID);
    tester.assertResponse(request("/application/v4/tenant/tenant1", POST)
                                  .userIdentity(USER_ID)
                                  .data("{\"athensDomain\":\"domain1\", \"property\":\"property1\"}")
                                  .oktaAccessToken(OKTA_AT),
                          new File("tenant-without-applications.json"));
    tester.assertResponse(request("/application/v4/tenant/tenant1/application/application1/instance/instance1", POST)
                                  .userIdentity(USER_ID)
                                  .oktaAccessToken(OKTA_AT),
                          new File("application-reference.json"));
    addScrewdriverUserToDeployRole(SCREWDRIVER_ID, ATHENZ_TENANT_DOMAIN,
                                   new com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId("application1"));
    return ApplicationId.from("tenant1", "application1", "instance1");
}
/**
 * Drives a change through the test pipeline: uploads the artifact, completes the component
 * job, then for each of the test and staging zones deploys, deactivates, and reports the
 * corresponding job (system-test, staging-test) as completed.
 */
private void startAndTestChange(ContainerControllerTester controllerTester, ApplicationId application,
                                long projectId, ApplicationPackage applicationPackage,
                                MultiPartStreamer deployData, long buildNumber) {
    ContainerTester tester = controllerTester.containerTester();
    // Make the built artifact available in the artifact repository mock.
    controllerTester.containerTester().serviceRegistry().artifactRepositoryMock()
            .put(application, applicationPackage,"1.0." + buildNumber + "-commit1");
    controllerTester.jobCompletion(JobType.component)
            .application(application)
            .projectId(projectId)
            .buildNumber(buildNumber)
            .submit();
    // Deploy, then deactivate, in the test zone, and report system-test done.
    String testPath = String.format("/application/v4/tenant/%s/application/%s/instance/%s/environment/test/region/us-east-1",
                                    application.tenant().value(), application.application().value(), application.instance().value());
    tester.assertResponse(request(testPath, POST)
                                  .data(deployData)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          new File("deploy-result.json"));
    tester.assertResponse(request(testPath, DELETE)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          "{\"message\":\"Deactivated " + application + " in test.us-east-1\"}");
    controllerTester.jobCompletion(JobType.systemTest)
            .application(application)
            .projectId(projectId)
            .submit();
    // Deploy, then deactivate, in the staging zone, and report staging-test done.
    String stagingPath = String.format("/application/v4/tenant/%s/application/%s/instance/%s/environment/staging/region/us-east-3",
                                       application.tenant().value(), application.application().value(), application.instance().value());
    tester.assertResponse(request(stagingPath, POST)
                                  .data(deployData)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          new File("deploy-result.json"));
    tester.assertResponse(request(stagingPath, DELETE)
                                  .screwdriverIdentity(SCREWDRIVER_ID),
                          "{\"message\":\"Deactivated " + application + " in staging.us-east-3\"}");
    controllerTester.jobCompletion(JobType.stagingTest)
            .application(application)
            .projectId(projectId)
            .submit();
}
/**
 * Cluster info, utilization and application and deployment metrics are maintained async
 * by maintainers.
 *
 * This sets these values (with fixed test data) as if the maintainers had run.
 */
private void setDeploymentMaintainedInfo(ContainerControllerTester controllerTester) {
    for (Application application : controllerTester.controller().applications().asList()) {
        controllerTester.controller().applications().lockApplicationOrThrow(application.id(), lockedApplication -> {
            // Fixed application-level metrics (arbitrary test values).
            lockedApplication = lockedApplication.with(new ApplicationMetrics(0.5, 0.7));
            for (Instance instance : application.instances().values()) {
                for (Deployment deployment : instance.deployments().values()) {
                    // One content cluster with two hosts plus its utilization numbers.
                    Map<ClusterSpec.Id, ClusterInfo> clusterInfo = new HashMap<>();
                    List<String> hostnames = new ArrayList<>();
                    hostnames.add("host1");
                    hostnames.add("host2");
                    clusterInfo.put(ClusterSpec.Id.from("cluster1"),
                                    new ClusterInfo("flavor1", 37, 2, 4, 50,
                                                    ClusterSpec.Type.content, hostnames));
                    Map<ClusterSpec.Id, ClusterUtilization> clusterUtils = new HashMap<>();
                    clusterUtils.put(ClusterSpec.Id.from("cluster1"), new ClusterUtilization(0.3, 0.6, 0.4, 0.3));
                    DeploymentMetrics metrics = new DeploymentMetrics(1, 2, 3, 4, 5,
                                                                      Optional.of(Instant.ofEpochMilli(123123)), Map.of());
                    lockedApplication = lockedApplication.with(instance.name(),
                                                               lockedInstance -> lockedInstance.withClusterInfo(deployment.zone(), clusterInfo)
                                                                                               .withClusterUtilization(deployment.zone(), clusterUtils)
                                                                                               .with(deployment.zone(), metrics)
                                                                                               .recordActivityAt(Instant.parse("2018-06-01T10:15:30.00Z"), deployment.zone()));
                }
                // NOTE(review): store() runs once per instance, inside the instance loop;
                // storing once after the loop looks equivalent and cheaper — confirm intent.
                controllerTester.controller().applications().store(lockedApplication);
            }
        });
    }
}
/** Fetches the shared ServiceRegistryMock component from the container. */
private ServiceRegistryMock serviceRegistry() {
    return (ServiceRegistryMock) tester.container().components().getComponent(ServiceRegistryMock.class.getName());
}

/**
 * Marks the given zone as IN for the rotation in the global routing mock, then runs the
 * rotation status updater so the controller picks up the new status.
 */
private void setZoneInRotation(String rotationName, ZoneId zone) {
    serviceRegistry().globalRoutingServiceMock().setStatus(rotationName, zone, com.yahoo.vespa.hosted.controller.api.integration.routing.RotationStatus.IN);
    new RotationStatusUpdater(tester.controller(), Duration.ofDays(1), new JobControl(tester.controller().curator())).run();
}

/**
 * Builds a RotationStatus for the instance's rotation, mapping every zone reported by the
 * global routing mock to RotationState.in; returns RotationStatus.EMPTY when the instance
 * has no rotation.
 */
private RotationStatus rotationStatus(Instance instance) {
    return controllerTester.controller().applications().rotationRepository().getRotation(instance)
                           .map(rotation -> {
                               var rotationStatus = controllerTester.controller().serviceRegistry().globalRoutingService().getHealthStatus(rotation.name());
                               var statusMap = new LinkedHashMap<ZoneId, RotationState>();
                               rotationStatus.forEach((zone, status) -> statusMap.put(zone, RotationState.in));
                               return RotationStatus.from(Map.of(rotation.id(), statusMap));
                           })
                           .orElse(RotationStatus.EMPTY);
}

/** Stores fixed contact information directly on the "tenant2" tenant, bypassing the API. */
// NOTE(review): the URI argument order here (contacts, properties, issues) differs from
// registerContact(...) below (issues, contacts, properties) — confirm which order matches
// Contact's constructor.
private void updateContactInformation() {
    Contact contact = new Contact(URI.create("www.contacts.tld/1234"),
                                  URI.create("www.properties.tld/1234"),
                                  URI.create("www.issues.tld/1234"),
                                  List.of(List.of("alice"), List.of("bob")), "queue", Optional.empty());
    tester.controller().tenants().lockIfPresent(TenantName.from("tenant2"),
                                                LockedTenant.Athenz.class,
                                                lockedTenant -> tester.controller().tenants().store(lockedTenant.with(contact)));
}

/** Registers contact information for the given property id in the contact retriever mock. */
private void registerContact(long propertyId) {
    PropertyId p = new PropertyId(String.valueOf(propertyId));
    serviceRegistry().contactRetrieverMock().addContact(p, new Contact(URI.create("www.issues.tld/" + p.id()),
                                                                       URI.create("www.contacts.tld/" + p.id()),
                                                                       URI.create("www.properties.tld/" + p.id()),
                                                                       List.of(Collections.singletonList("alice"),
                                                                               Collections.singletonList("bob")),
                                                                       "queue", Optional.empty()));
}
/**
 * Fluent builder for container {@code Request}s used by these tests: carries path, method,
 * body data, an optional Athenz identity, an optional Okta token, content type, extra
 * headers and an optional {@code recursive} query parameter.
 */
private static class RequestBuilder implements Supplier<Request> {
    private final String path;
    private final Request.Method method;
    private byte[] data = new byte[0];
    // Athenz identity to attach (user or screwdriver); null means anonymous.
    private AthenzIdentity identity;
    private OktaAccessToken oktaAccessToken;
    private String contentType = "application/json";
    private Map<String, List<String>> headers = new HashMap<>();
    // Value for the "recursive" query parameter; null means omitted.
    private String recursive;
    private RequestBuilder(String path, Request.Method method) {
        this.path = path;
        this.method = method;
    }
    private RequestBuilder data(byte[] data) { this.data = data; return this; }
    private RequestBuilder data(String data) { return data(data.getBytes(StandardCharsets.UTF_8)); }
    // Takes both the streamer's bytes and its multipart content type.
    private RequestBuilder data(MultiPartStreamer streamer) {
        return Exceptions.uncheck(() -> data(streamer.data().readAllBytes()).contentType(streamer.contentType()));
    }
    private RequestBuilder userIdentity(UserId userId) { this.identity = HostedAthenzIdentities.from(userId); return this; }
    private RequestBuilder screwdriverIdentity(ScrewdriverId screwdriverId) { this.identity = HostedAthenzIdentities.from(screwdriverId); return this; }
    private RequestBuilder oktaAccessToken(OktaAccessToken oktaAccessToken) { this.oktaAccessToken = oktaAccessToken; return this; }
    private RequestBuilder contentType(String contentType) { this.contentType = contentType; return this; }
    private RequestBuilder recursive(String recursive) { this.recursive = recursive; return this; }
    private RequestBuilder header(String name, String value) {
        this.headers.putIfAbsent(name, new ArrayList<>());
        this.headers.get(name).add(value);
        return this;
    }
    /** Assembles the Request, attaching headers, content type, identity and Okta token. */
    @Override
    public Request get() {
        // NOTE(review): the URL literal below is truncated in this copy (it reads just "http:");
        // presumably the "//host:port" part plus the path concatenation was lost when line
        // comments were stripped — restore from version control before compiling.
        Request request = new Request("http:
        (recursive == null ? "" : "?recursive=" + recursive),
        data, method);
        request.getHeaders().addAll(headers);
        // put(...) replaces any Content-Type added via header(...) above.
        request.getHeaders().put("Content-Type", contentType);
        if (identity != null) {
            addIdentityToRequest(request, identity);
        }
        if (oktaAccessToken != null) {
            addOktaAccessToken(request, oktaAccessToken);
        }
        return request;
    }
}
} |
```suggestion return JAXBContext.newInstance(classes.toArray(new Class[0]), properties); ``` | public JAXBContext jaxbContext(Instance<JaxbContextCustomizer> customizers) {
try {
Map<String, Object> properties = new HashMap<>();
List<JaxbContextCustomizer> sortedCustomizers = sortCustomizersInDescendingPriorityOrder(customizers);
for (JaxbContextCustomizer customizer : sortedCustomizers) {
customizer.customizeContextProperties(properties);
}
String[] classNamesToBeBounded = JaxbContextConfigRecorder.getClassesToBeBound();
List<Class<?>> classes = new ArrayList<>();
for (int i = 0; i < classNamesToBeBounded.length; i++) {
Class<?> clazz = getClassByName(classNamesToBeBounded[i]);
if (!clazz.isPrimitive()) {
classes.add(clazz);
}
}
return JAXBContext.newInstance(classes.toArray(new Class[classes.size()]), properties);
} catch (JAXBException | ClassNotFoundException e) {
throw new RuntimeException(e);
}
} | return JAXBContext.newInstance(classes.toArray(new Class[classes.size()]), properties); | public JAXBContext jaxbContext(Instance<JaxbContextCustomizer> customizers) {
try {
Map<String, Object> properties = new HashMap<>();
List<JaxbContextCustomizer> sortedCustomizers = sortCustomizersInDescendingPriorityOrder(customizers);
for (JaxbContextCustomizer customizer : sortedCustomizers) {
customizer.customizeContextProperties(properties);
}
String[] classNamesToBeBounded = JaxbContextConfigRecorder.getClassesToBeBound();
List<Class<?>> classes = new ArrayList<>();
for (int i = 0; i < classNamesToBeBounded.length; i++) {
Class<?> clazz = getClassByName(classNamesToBeBounded[i]);
if (!clazz.isPrimitive()) {
classes.add(clazz);
}
}
return JAXBContext.newInstance(classes.toArray(new Class[0]), properties);
} catch (JAXBException | ClassNotFoundException e) {
throw new RuntimeException(e);
}
} | class JaxbContextProducer {
@DefaultBean
@Singleton
@Produces
@DefaultBean
@Singleton
@Produces
public Marshaller marshaller(JAXBContext jaxbContext, Instance<JaxbContextCustomizer> customizers) {
try {
Marshaller marshaller = jaxbContext.createMarshaller();
List<JaxbContextCustomizer> sortedCustomizers = sortCustomizersInDescendingPriorityOrder(customizers);
for (JaxbContextCustomizer customizer : sortedCustomizers) {
customizer.customizeMarshaller(marshaller);
}
return marshaller;
} catch (JAXBException e) {
throw new RuntimeException(e);
}
}
@DefaultBean
@Singleton
@Produces
public Unmarshaller unmarshaller(JAXBContext jaxbContext, Instance<JaxbContextCustomizer> customizers) {
try {
Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
List<JaxbContextCustomizer> sortedCustomizers = sortCustomizersInDescendingPriorityOrder(customizers);
for (JaxbContextCustomizer customizer : sortedCustomizers) {
customizer.customizeUnmarshaller(unmarshaller);
}
return unmarshaller;
} catch (JAXBException e) {
throw new RuntimeException(e);
}
}
private List<JaxbContextCustomizer> sortCustomizersInDescendingPriorityOrder(Instance<JaxbContextCustomizer> customizers) {
List<JaxbContextCustomizer> sortedCustomizers = new ArrayList<>();
for (JaxbContextCustomizer customizer : customizers) {
sortedCustomizers.add(customizer);
}
Collections.sort(sortedCustomizers);
return sortedCustomizers;
}
private Class<?> getClassByName(String name) throws ClassNotFoundException {
return Class.forName(name, false, Thread.currentThread().getContextClassLoader());
}
} | class JaxbContextProducer {
@DefaultBean
@Singleton
@Produces
@DefaultBean
@Singleton
@Produces
public Marshaller marshaller(JAXBContext jaxbContext, Instance<JaxbContextCustomizer> customizers) {
try {
Marshaller marshaller = jaxbContext.createMarshaller();
List<JaxbContextCustomizer> sortedCustomizers = sortCustomizersInDescendingPriorityOrder(customizers);
for (JaxbContextCustomizer customizer : sortedCustomizers) {
customizer.customizeMarshaller(marshaller);
}
return marshaller;
} catch (JAXBException e) {
throw new RuntimeException(e);
}
}
@DefaultBean
@Singleton
@Produces
public Unmarshaller unmarshaller(JAXBContext jaxbContext, Instance<JaxbContextCustomizer> customizers) {
try {
Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
List<JaxbContextCustomizer> sortedCustomizers = sortCustomizersInDescendingPriorityOrder(customizers);
for (JaxbContextCustomizer customizer : sortedCustomizers) {
customizer.customizeUnmarshaller(unmarshaller);
}
return unmarshaller;
} catch (JAXBException e) {
throw new RuntimeException(e);
}
}
private List<JaxbContextCustomizer> sortCustomizersInDescendingPriorityOrder(Instance<JaxbContextCustomizer> customizers) {
List<JaxbContextCustomizer> sortedCustomizers = new ArrayList<>();
for (JaxbContextCustomizer customizer : customizers) {
sortedCustomizers.add(customizer);
}
Collections.sort(sortedCustomizers);
return sortedCustomizers;
}
private Class<?> getClassByName(String name) throws ClassNotFoundException {
return Class.forName(name, false, Thread.currentThread().getContextClassLoader());
}
} |
this is a private method that is used within this file that is supposed to simplify the code that deals with UUIDs. Also, `32 - length` is a start index, so it will return exactly what's asked for. It's still error-prone (length can be bigger than 32), but given it's never meant to be shared outside of this file, simplicity seems more important to me. | private static String getRandomId(int length) {
return CoreUtils.randomUuid().toString()
.replace("-", "")
.substring(32 - length);
} | .substring(32 - length); | private static String getRandomId(int length) {
return CoreUtils.randomUuid().toString()
.replace("-", "")
.substring(32 - length);
} | class LoggingSpan {
public static final LoggingSpan NOOP = new LoggingSpan();
private static final ClientLogger LOGGER = new ClientLogger(LoggingSpan.class);
private final String traceId;
private final String spanId;
private final LoggingEventBuilder log;
private final boolean enabled;
private LoggingSpan() {
this.traceId = null;
this.spanId = null;
this.log = null;
this.enabled = false;
}
public String getTraceId() {
return enabled ? traceId : "00000000000000000000000000000000";
}
public String getSpanId() {
return enabled ? spanId : "0000000000000000";
}
LoggingSpan(String name, SpanKind kind, String traceId, String parentSpanId) {
this.traceId = traceId != null ? traceId : getRandomId(32);
this.spanId = getRandomId(16);
this.log = LOGGER.atInfo()
.addKeyValue("traceId", this.traceId)
.addKeyValue("spanId", spanId)
.addKeyValue("parentSpanId", parentSpanId)
.addKeyValue("name", name)
.addKeyValue("kind", kind.name());
log.log("span created");
this.enabled = true;
}
LoggingSpan(String name, SpanKind kind, LoggingSpan parent) {
this(name, kind, parent.enabled ? parent.traceId : null, parent.getSpanId());
}
public LoggingSpan addKeyValue(String key, Object value) {
if (enabled) {
log.addKeyValue(key, value);
}
return this;
}
public void end(Throwable throwable) {
if (enabled) {
if (throwable != null) {
log.log("span ended", throwable);
} else {
log.log("span ended");
}
}
}
/**
* Generates random id with given length up to 32 chars.
*/
} | class LoggingSpan {
public static final LoggingSpan NOOP = new LoggingSpan();
private static final ClientLogger LOGGER = new ClientLogger(LoggingSpan.class);
private final String traceId;
private final String spanId;
private final LoggingEventBuilder log;
private final boolean enabled;
private LoggingSpan() {
this.traceId = null;
this.spanId = null;
this.log = null;
this.enabled = false;
}
public String getTraceId() {
return enabled ? traceId : "00000000000000000000000000000000";
}
public String getSpanId() {
return enabled ? spanId : "0000000000000000";
}
LoggingSpan(String name, SpanKind kind, String traceId, String parentSpanId) {
this.traceId = traceId != null ? traceId : getRandomId(32);
this.spanId = getRandomId(16);
this.log = LOGGER.atInfo()
.addKeyValue("traceId", this.traceId)
.addKeyValue("spanId", spanId)
.addKeyValue("parentSpanId", parentSpanId)
.addKeyValue("name", name)
.addKeyValue("kind", kind.name());
log.log("span created");
this.enabled = true;
}
LoggingSpan(String name, SpanKind kind, LoggingSpan parent) {
this(name, kind, parent.enabled ? parent.traceId : null, parent.getSpanId());
}
public LoggingSpan addKeyValue(String key, Object value) {
if (enabled) {
log.addKeyValue(key, value);
}
return this;
}
public void end(Throwable throwable) {
if (enabled) {
if (throwable != null) {
log.log("span ended", throwable);
} else {
log.log("span ended");
}
}
}
/**
* Generates random id with given length up to 32 chars.
*/
} |
At the end we should also take into account whether this partition actually occupies local resources (see https://issues.apache.org/jira/browse/FLINK-12890) otherwise TM and its connection can be released independently. | void markFinished(Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
assertRunningInJobMasterMainThread();
while (true) {
ExecutionState current = this.state;
if (current == RUNNING || current == DEPLOYING) {
if (transitionState(current, FINISHED)) {
try {
for (IntermediateResultPartition finishedPartition
: getVertex().finishAllBlockingPartitions()) {
IntermediateResultPartition[] allPartitions = finishedPartition
.getIntermediateResult().getPartitions();
PartitionTable<ResourceID> partitionTable = vertex.getExecutionGraph().getPartitionTable();
for (IntermediateResultPartition partition : allPartitions) {
ResultPartitionDeploymentDescriptor descriptor = producedPartitions.get(partition.getPartitionId());
if (!descriptor.isReleasedOnConsumption()) {
if (!taskManagerLocationFuture.isDone()) {
throw new IllegalStateException("Execution finished without having an assigned location.");
} else {
partitionTable.startTrackingPartition(
getAssignedResourceLocation().getResourceID(),
descriptor.getShuffleDescriptor().getResultPartitionID());
}
}
}
for (IntermediateResultPartition partition : allPartitions) {
scheduleOrUpdateConsumers(partition.getConsumers());
}
}
updateAccumulatorsAndMetrics(userAccumulators, metrics);
releaseAssignedResource(null);
vertex.getExecutionGraph().deregisterExecution(this);
}
finally {
vertex.executionFinished(this);
}
return;
}
}
else if (current == CANCELING) {
completeCancelling(userAccumulators, metrics);
return;
}
else if (current == CANCELED || current == FAILED) {
if (LOG.isDebugEnabled()) {
LOG.debug("Task FINISHED, but concurrently went to state " + state);
}
return;
}
else {
markFailed(new Exception("Vertex received FINISHED message while being in state " + state));
return;
}
}
} | if (!descriptor.isReleasedOnConsumption()) { | void markFinished(Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
assertRunningInJobMasterMainThread();
while (true) {
ExecutionState current = this.state;
if (current == RUNNING || current == DEPLOYING) {
if (transitionState(current, FINISHED)) {
try {
for (IntermediateResultPartition finishedPartition
: getVertex().finishAllBlockingPartitions()) {
IntermediateResultPartition[] allPartitions = finishedPartition
.getIntermediateResult().getPartitions();
for (IntermediateResultPartition partition : allPartitions) {
scheduleOrUpdateConsumers(partition.getConsumers());
}
}
updateAccumulatorsAndMetrics(userAccumulators, metrics);
releaseAssignedResource(null);
vertex.getExecutionGraph().deregisterExecution(this);
}
finally {
vertex.executionFinished(this);
}
return;
}
}
else if (current == CANCELING) {
completeCancelling(userAccumulators, metrics);
return;
}
else if (current == CANCELED || current == FAILED) {
if (LOG.isDebugEnabled()) {
LOG.debug("Task FINISHED, but concurrently went to state " + state);
}
return;
}
else {
markFailed(new Exception("Vertex received FINISHED message while being in state " + state));
return;
}
}
} | class Execution implements AccessExecution, Archiveable<ArchivedExecution>, LogicalSlot.Payload {
private static final AtomicReferenceFieldUpdater<Execution, ExecutionState> STATE_UPDATER =
AtomicReferenceFieldUpdater.newUpdater(Execution.class, ExecutionState.class, "state");
private static final AtomicReferenceFieldUpdater<Execution, LogicalSlot> ASSIGNED_SLOT_UPDATER = AtomicReferenceFieldUpdater.newUpdater(
Execution.class,
LogicalSlot.class,
"assignedResource");
private static final Logger LOG = ExecutionGraph.LOG;
private static final int NUM_CANCEL_CALL_TRIES = 3;
private static final int NUM_STOP_CALL_TRIES = 3;
/** The executor which is used to execute futures. */
private final Executor executor;
/** The execution vertex whose task this execution executes. */
private final ExecutionVertex vertex;
/** The unique ID marking the specific execution instant of the task. */
private final ExecutionAttemptID attemptId;
/** Gets the global modification version of the execution graph when this execution was created.
* This version is bumped in the ExecutionGraph whenever a global failover happens. It is used
* to resolve conflicts between concurrent modification by global and local failover actions. */
private final long globalModVersion;
/** The timestamps when state transitions occurred, indexed by {@link ExecutionState
private final long[] stateTimestamps;
private final int attemptNumber;
private final Time rpcTimeout;
private final Collection<PartitionInfo> partitionInfos;
/** A future that completes once the Execution reaches a terminal ExecutionState. */
private final CompletableFuture<ExecutionState> terminalStateFuture;
private final CompletableFuture<?> releaseFuture;
private final CompletableFuture<TaskManagerLocation> taskManagerLocationFuture;
private volatile ExecutionState state = CREATED;
private volatile LogicalSlot assignedResource;
private volatile Throwable failureCause;
/** Information to restore the task on recovery, such as checkpoint id and task state snapshot. */
@Nullable
private volatile JobManagerTaskRestore taskRestore;
/** This field holds the allocation id once it was assigned successfully. */
@Nullable
private volatile AllocationID assignedAllocationID;
/** Lock for updating the accumulators atomically.
* Prevents final accumulators to be overwritten by partial accumulators on a late heartbeat. */
private final Object accumulatorLock = new Object();
/* Continuously updated map of user-defined accumulators */
private volatile Map<String, Accumulator<?, ?>> userAccumulators;
private volatile IOMetrics ioMetrics;
private Map<IntermediateResultPartitionID, ResultPartitionDeploymentDescriptor> producedPartitions;
/**
* Creates a new Execution attempt.
*
* @param executor
* The executor used to dispatch callbacks from futures and asynchronous RPC calls.
* @param vertex
* The execution vertex to which this Execution belongs
* @param attemptNumber
* The execution attempt number.
* @param globalModVersion
* The global modification version of the execution graph when this execution was created
* @param startTimestamp
* The timestamp that marks the creation of this Execution
* @param rpcTimeout
* The rpcTimeout for RPC calls like deploy/cancel/stop.
*/
public Execution(
Executor executor,
ExecutionVertex vertex,
int attemptNumber,
long globalModVersion,
long startTimestamp,
Time rpcTimeout) {
this.executor = checkNotNull(executor);
this.vertex = checkNotNull(vertex);
this.attemptId = new ExecutionAttemptID();
this.rpcTimeout = checkNotNull(rpcTimeout);
this.globalModVersion = globalModVersion;
this.attemptNumber = attemptNumber;
this.stateTimestamps = new long[ExecutionState.values().length];
markTimestamp(CREATED, startTimestamp);
this.partitionInfos = new ArrayList<>(16);
this.producedPartitions = Collections.emptyMap();
this.terminalStateFuture = new CompletableFuture<>();
this.releaseFuture = new CompletableFuture<>();
this.taskManagerLocationFuture = new CompletableFuture<>();
this.assignedResource = null;
}
public ExecutionVertex getVertex() {
return vertex;
}
@Override
public ExecutionAttemptID getAttemptId() {
return attemptId;
}
@Override
public int getAttemptNumber() {
return attemptNumber;
}
@Override
public ExecutionState getState() {
return state;
}
@Nullable
public AllocationID getAssignedAllocationID() {
return assignedAllocationID;
}
/**
* Gets the global modification version of the execution graph when this execution was created.
*
* <p>This version is bumped in the ExecutionGraph whenever a global failover happens. It is used
* to resolve conflicts between concurrent modification by global and local failover actions.
*/
public long getGlobalModVersion() {
return globalModVersion;
}
public CompletableFuture<TaskManagerLocation> getTaskManagerLocationFuture() {
return taskManagerLocationFuture;
}
public LogicalSlot getAssignedResource() {
return assignedResource;
}
public Optional<ResultPartitionDeploymentDescriptor> getResultPartitionDeploymentDescriptor(
IntermediateResultPartitionID id) {
return Optional.ofNullable(producedPartitions.get(id));
}
/**
* Tries to assign the given slot to the execution. The assignment works only if the
* Execution is in state SCHEDULED. Returns true, if the resource could be assigned.
*
* @param logicalSlot to assign to this execution
* @return true if the slot could be assigned to the execution, otherwise false
*/
@VisibleForTesting
boolean tryAssignResource(final LogicalSlot logicalSlot) {
assertRunningInJobMasterMainThread();
checkNotNull(logicalSlot);
if (state == SCHEDULED || state == CREATED) {
if (ASSIGNED_SLOT_UPDATER.compareAndSet(this, null, logicalSlot)) {
if (logicalSlot.tryAssignPayload(this)) {
if ((state == SCHEDULED || state == CREATED) && !taskManagerLocationFuture.isDone()) {
taskManagerLocationFuture.complete(logicalSlot.getTaskManagerLocation());
assignedAllocationID = logicalSlot.getAllocationId();
return true;
} else {
ASSIGNED_SLOT_UPDATER.set(this, null);
return false;
}
} else {
ASSIGNED_SLOT_UPDATER.set(this, null);
return false;
}
} else {
return false;
}
} else {
return false;
}
}
public InputSplit getNextInputSplit() {
final LogicalSlot slot = this.getAssignedResource();
final String host = slot != null ? slot.getTaskManagerLocation().getHostname() : null;
return this.vertex.getNextInputSplit(host);
}
@Override
public TaskManagerLocation getAssignedResourceLocation() {
final LogicalSlot currentAssignedResource = assignedResource;
return currentAssignedResource != null ? currentAssignedResource.getTaskManagerLocation() : null;
}
public Throwable getFailureCause() {
return failureCause;
}
@Override
public String getFailureCauseAsString() {
return ExceptionUtils.stringifyException(getFailureCause());
}
@Override
public long[] getStateTimestamps() {
return stateTimestamps;
}
@Override
public long getStateTimestamp(ExecutionState state) {
return this.stateTimestamps[state.ordinal()];
}
public boolean isFinished() {
return state.isTerminal();
}
@Nullable
public JobManagerTaskRestore getTaskRestore() {
return taskRestore;
}
/**
* Sets the initial state for the execution. The serialized state is then shipped via the
* {@link TaskDeploymentDescriptor} to the TaskManagers.
*
* @param taskRestore information to restore the state
*/
public void setInitialState(@Nullable JobManagerTaskRestore taskRestore) {
this.taskRestore = taskRestore;
}
/**
* Gets a future that completes once the task execution reaches a terminal state.
* The future will be completed with specific state that the execution reached.
* This future is always completed from the job master's main thread.
*
* @return A future which is completed once the execution reaches a terminal state
*/
@Override
public CompletableFuture<ExecutionState> getTerminalStateFuture() {
return terminalStateFuture;
}
/**
* Gets the release future which is completed once the execution reaches a terminal
* state and the assigned resource has been released.
* This future is always completed from the job master's main thread.
*
* @return A future which is completed once the assigned resource has been released
*/
public CompletableFuture<?> getReleaseFuture() {
return releaseFuture;
}
public CompletableFuture<Void> scheduleForExecution() {
final ExecutionGraph executionGraph = getVertex().getExecutionGraph();
final SlotProvider resourceProvider = executionGraph.getSlotProvider();
final boolean allowQueued = executionGraph.isQueuedSchedulingAllowed();
return scheduleForExecution(
resourceProvider,
allowQueued,
LocationPreferenceConstraint.ANY,
Collections.emptySet());
}
/**
* NOTE: This method only throws exceptions if it is in an illegal state to be scheduled, or if the tasks needs
* to be scheduled immediately and no resource is available. If the task is accepted by the schedule, any
* error sets the vertex state to failed and triggers the recovery logic.
*
* @param slotProvider The slot provider to use to allocate slot for this execution attempt.
* @param queued Flag to indicate whether the scheduler may queue this task if it cannot
* immediately deploy it.
* @param locationPreferenceConstraint constraint for the location preferences
* @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph.
* Can be empty if the allocation ids are not required for scheduling.
* @return Future which is completed once the Execution has been deployed
*/
public CompletableFuture<Void> scheduleForExecution(
SlotProvider slotProvider,
boolean queued,
LocationPreferenceConstraint locationPreferenceConstraint,
@Nonnull Set<AllocationID> allPreviousExecutionGraphAllocationIds) {
assertRunningInJobMasterMainThread();
final ExecutionGraph executionGraph = vertex.getExecutionGraph();
final Time allocationTimeout = executionGraph.getAllocationTimeout();
try {
final CompletableFuture<Execution> allocationFuture = allocateResourcesForExecution(
slotProvider,
queued,
locationPreferenceConstraint,
allPreviousExecutionGraphAllocationIds,
allocationTimeout);
final CompletableFuture<Void> deploymentFuture;
if (allocationFuture.isDone() || queued) {
deploymentFuture = allocationFuture.thenRun(ThrowingRunnable.unchecked(this::deploy));
} else {
deploymentFuture = FutureUtils.completedExceptionally(
new IllegalArgumentException("The slot allocation future has not been completed yet."));
}
deploymentFuture.whenComplete(
(Void ignored, Throwable failure) -> {
if (failure != null) {
final Throwable stripCompletionException = ExceptionUtils.stripCompletionException(failure);
final Throwable schedulingFailureCause;
if (stripCompletionException instanceof TimeoutException) {
schedulingFailureCause = new NoResourceAvailableException(
"Could not allocate enough slots within timeout of " + allocationTimeout + " to run the job. " +
"Please make sure that the cluster has enough resources.");
} else {
schedulingFailureCause = stripCompletionException;
}
markFailed(schedulingFailureCause);
}
});
return deploymentFuture;
} catch (IllegalExecutionStateException e) {
return FutureUtils.completedExceptionally(e);
}
}
/**
* Allocates resources for the execution.
*
* <p>Allocates following resources:
* <ol>
* <li>slot obtained from the slot provider</li>
* <li>registers produced partitions with the {@link org.apache.flink.runtime.shuffle.ShuffleMaster}</li>
* </ol>
*
* @param slotProvider to obtain a new slot from
* @param queued if the allocation can be queued
* @param locationPreferenceConstraint constraint for the location preferences
* @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph.
* Can be empty if the allocation ids are not required for scheduling.
* @param allocationTimeout rpcTimeout for allocating a new slot
* @return Future which is completed with this execution once the slot has been assigned
* or with an exception if an error occurred.
*/
CompletableFuture<Execution> allocateResourcesForExecution(
SlotProvider slotProvider,
boolean queued,
LocationPreferenceConstraint locationPreferenceConstraint,
@Nonnull Set<AllocationID> allPreviousExecutionGraphAllocationIds,
Time allocationTimeout) {
return allocateAndAssignSlotForExecution(
slotProvider,
queued,
locationPreferenceConstraint,
allPreviousExecutionGraphAllocationIds,
allocationTimeout)
.thenCompose(slot -> registerProducedPartitions(slot.getTaskManagerLocation()));
}
/**
* Allocates and assigns a slot obtained from the slot provider to the execution.
*
* @param slotProvider to obtain a new slot from
* @param queued if the allocation can be queued
* @param locationPreferenceConstraint constraint for the location preferences
* @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph.
* Can be empty if the allocation ids are not required for scheduling.
* @param allocationTimeout rpcTimeout for allocating a new slot
* @return Future which is completed with the allocated slot once it has been assigned
* or with an exception if an error occurred.
*/
private CompletableFuture<LogicalSlot> allocateAndAssignSlotForExecution(
SlotProvider slotProvider,
boolean queued,
LocationPreferenceConstraint locationPreferenceConstraint,
@Nonnull Set<AllocationID> allPreviousExecutionGraphAllocationIds,
Time allocationTimeout) {
checkNotNull(slotProvider);
assertRunningInJobMasterMainThread();
final SlotSharingGroup sharingGroup = vertex.getJobVertex().getSlotSharingGroup();
final CoLocationConstraint locationConstraint = vertex.getLocationConstraint();
if (locationConstraint != null && sharingGroup == null) {
throw new IllegalStateException(
"Trying to schedule with co-location constraint but without slot sharing allowed.");
}
if (transitionState(CREATED, SCHEDULED)) {
final SlotSharingGroupId slotSharingGroupId = sharingGroup != null ? sharingGroup.getSlotSharingGroupId() : null;
ScheduledUnit toSchedule = locationConstraint == null ?
new ScheduledUnit(this, slotSharingGroupId) :
new ScheduledUnit(this, slotSharingGroupId, locationConstraint);
ExecutionVertex executionVertex = getVertex();
AllocationID lastAllocation = executionVertex.getLatestPriorAllocation();
Collection<AllocationID> previousAllocationIDs =
lastAllocation != null ? Collections.singletonList(lastAllocation) : Collections.emptyList();
final CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture =
calculatePreferredLocations(locationPreferenceConstraint);
final SlotRequestId slotRequestId = new SlotRequestId();
final CompletableFuture<LogicalSlot> logicalSlotFuture =
preferredLocationsFuture.thenCompose(
(Collection<TaskManagerLocation> preferredLocations) ->
slotProvider.allocateSlot(
slotRequestId,
toSchedule,
new SlotProfile(
ResourceProfile.UNKNOWN,
preferredLocations,
previousAllocationIDs,
allPreviousExecutionGraphAllocationIds),
queued,
allocationTimeout));
releaseFuture.whenComplete(
(Object ignored, Throwable throwable) -> {
if (logicalSlotFuture.cancel(false)) {
slotProvider.cancelSlotRequest(
slotRequestId,
slotSharingGroupId,
new FlinkException("Execution " + this + " was released."));
}
});
return logicalSlotFuture.handle(
(LogicalSlot logicalSlot, Throwable failure) -> {
if (failure != null) {
throw new CompletionException(failure);
}
if (tryAssignResource(logicalSlot)) {
return logicalSlot;
} else {
logicalSlot.releaseSlot(new FlinkException("Could not assign logical slot to execution " + this + '.'));
throw new CompletionException(
new FlinkException(
"Could not assign slot " + logicalSlot + " to execution " + this + " because it has already been assigned "));
}
});
} else {
throw new IllegalExecutionStateException(this, CREATED, state);
}
}
@VisibleForTesting
CompletableFuture<Execution> registerProducedPartitions(TaskManagerLocation location) {
assertRunningInJobMasterMainThread();
return FutureUtils.thenApplyAsyncIfNotDone(
registerProducedPartitions(vertex, location, attemptId),
vertex.getExecutionGraph().getJobMasterMainThreadExecutor(),
producedPartitionsCache -> {
producedPartitions = producedPartitionsCache;
return this;
});
}
@VisibleForTesting
static CompletableFuture<Map<IntermediateResultPartitionID, ResultPartitionDeploymentDescriptor>> registerProducedPartitions(
ExecutionVertex vertex,
TaskManagerLocation location,
ExecutionAttemptID attemptId) {
ProducerDescriptor producerDescriptor = ProducerDescriptor.create(location, attemptId);
boolean lazyScheduling = vertex.getExecutionGraph().getScheduleMode().allowLazyDeployment();
Collection<IntermediateResultPartition> partitions = vertex.getProducedPartitions().values();
Collection<CompletableFuture<ResultPartitionDeploymentDescriptor>> partitionRegistrations =
new ArrayList<>(partitions.size());
for (IntermediateResultPartition partition : partitions) {
PartitionDescriptor partitionDescriptor = PartitionDescriptor.from(partition);
int maxParallelism = getPartitionMaxParallelism(partition);
CompletableFuture<? extends ShuffleDescriptor> shuffleDescriptorFuture = vertex
.getExecutionGraph()
.getShuffleMaster()
.registerPartitionWithProducer(partitionDescriptor, producerDescriptor);
CompletableFuture<ResultPartitionDeploymentDescriptor> partitionRegistration = shuffleDescriptorFuture
.thenApply(shuffleDescriptor -> new ResultPartitionDeploymentDescriptor(
partitionDescriptor,
shuffleDescriptor,
maxParallelism,
lazyScheduling));
partitionRegistrations.add(partitionRegistration);
}
return FutureUtils.combineAll(partitionRegistrations).thenApply(rpdds -> {
Map<IntermediateResultPartitionID, ResultPartitionDeploymentDescriptor> producedPartitions =
new LinkedHashMap<>(partitions.size());
rpdds.forEach(rpdd -> producedPartitions.put(rpdd.getPartitionId(), rpdd));
return producedPartitions;
});
}
private static int getPartitionMaxParallelism(IntermediateResultPartition partition) {
final List<List<ExecutionEdge>> consumers = partition.getConsumers();
int maxParallelism = KeyGroupRangeAssignment.UPPER_BOUND_MAX_PARALLELISM;
if (!consumers.isEmpty()) {
List<ExecutionEdge> consumer = consumers.get(0);
ExecutionJobVertex consumerVertex = consumer.get(0).getTarget().getJobVertex();
maxParallelism = consumerVertex.getMaxParallelism();
}
return maxParallelism;
}
/**
* Deploys the execution to the previously assigned resource.
*
* @throws JobException if the execution cannot be deployed to the assigned resource
*/
public void deploy() throws JobException {
assertRunningInJobMasterMainThread();
final LogicalSlot slot = assignedResource;
checkNotNull(slot, "In order to deploy the execution we first have to assign a resource via tryAssignResource.");
if (!slot.isAlive()) {
throw new JobException("Target slot (TaskManager) for deployment is no longer alive.");
}
ExecutionState previous = this.state;
if (previous == SCHEDULED || previous == CREATED) {
if (!transitionState(previous, DEPLOYING)) {
throw new IllegalStateException("Cannot deploy task: Concurrent deployment call race.");
}
}
else {
throw new IllegalStateException("The vertex must be in CREATED or SCHEDULED state to be deployed. Found state " + previous);
}
if (this != slot.getPayload()) {
throw new IllegalStateException(
String.format("The execution %s has not been assigned to the assigned slot.", this));
}
try {
if (this.state != DEPLOYING) {
slot.releaseSlot(new FlinkException("Actual state of execution " + this + " (" + state + ") does not match expected state DEPLOYING."));
return;
}
if (LOG.isInfoEnabled()) {
LOG.info(String.format("Deploying %s (attempt
attemptNumber, getAssignedResourceLocation()));
}
final TaskDeploymentDescriptor deployment = TaskDeploymentDescriptorFactory
.fromExecutionVertex(vertex, attemptNumber)
.createDeploymentDescriptor(
slot.getAllocationId(),
slot.getPhysicalSlotNumber(),
taskRestore,
producedPartitions.values());
taskRestore = null;
final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
final ComponentMainThreadExecutor jobMasterMainThreadExecutor =
vertex.getExecutionGraph().getJobMasterMainThreadExecutor();
CompletableFuture.supplyAsync(() -> taskManagerGateway.submitTask(deployment, rpcTimeout), executor)
.thenCompose(Function.identity())
.whenCompleteAsync(
(ack, failure) -> {
if (failure != null) {
if (failure instanceof TimeoutException) {
String taskname = vertex.getTaskNameWithSubtaskIndex() + " (" + attemptId + ')';
markFailed(new Exception(
"Cannot deploy task " + taskname + " - TaskManager (" + getAssignedResourceLocation()
+ ") not responding after a rpcTimeout of " + rpcTimeout, failure));
} else {
markFailed(failure);
}
}
},
jobMasterMainThreadExecutor);
}
catch (Throwable t) {
markFailed(t);
ExceptionUtils.rethrow(t);
}
}
public void cancel() {
assertRunningInJobMasterMainThread();
while (true) {
ExecutionState current = this.state;
if (current == CANCELING || current == CANCELED) {
return;
}
else if (current == RUNNING || current == DEPLOYING) {
if (startCancelling(NUM_CANCEL_CALL_TRIES)) {
return;
}
}
else if (current == FINISHED || current == FAILED) {
sendReleaseIntermediateResultPartitionsRpcCall();
return;
}
else if (current == CREATED || current == SCHEDULED) {
if (cancelAtomically()) {
return;
}
}
else {
throw new IllegalStateException(current.name());
}
}
}
public CompletableFuture<?> suspend() {
switch(state) {
case RUNNING:
case DEPLOYING:
case CREATED:
case SCHEDULED:
if (!cancelAtomically()) {
throw new IllegalStateException(
String.format("Could not directly go to %s from %s.", CANCELED.name(), state.name()));
}
break;
case CANCELING:
completeCancelling();
break;
case FINISHED:
case FAILED:
sendReleaseIntermediateResultPartitionsRpcCall();
break;
case CANCELED:
break;
default:
throw new IllegalStateException(state.name());
}
return releaseFuture;
}
private void scheduleConsumer(ExecutionVertex consumerVertex) {
try {
final ExecutionGraph executionGraph = consumerVertex.getExecutionGraph();
consumerVertex.scheduleForExecution(
executionGraph.getSlotProvider(),
executionGraph.isQueuedSchedulingAllowed(),
LocationPreferenceConstraint.ANY,
Collections.emptySet());
} catch (Throwable t) {
consumerVertex.fail(new IllegalStateException("Could not schedule consumer " +
"vertex " + consumerVertex, t));
}
}
void scheduleOrUpdateConsumers(List<List<ExecutionEdge>> allConsumers) {
assertRunningInJobMasterMainThread();
final int numConsumers = allConsumers.size();
if (numConsumers > 1) {
fail(new IllegalStateException("Currently, only a single consumer group per partition is supported."));
} else if (numConsumers == 0) {
return;
}
for (ExecutionEdge edge : allConsumers.get(0)) {
final ExecutionVertex consumerVertex = edge.getTarget();
final Execution consumer = consumerVertex.getCurrentExecutionAttempt();
final ExecutionState consumerState = consumer.getState();
if (consumerState == CREATED) {
if (consumerVertex.getInputDependencyConstraint() == InputDependencyConstraint.ANY ||
consumerVertex.checkInputDependencyConstraints()) {
scheduleConsumer(consumerVertex);
}
}
else if (consumerState == DEPLOYING || consumerState == RUNNING) {
final PartitionInfo partitionInfo = createPartitionInfo(edge);
if (consumerState == DEPLOYING) {
consumerVertex.cachePartitionInfo(partitionInfo);
} else {
consumer.sendUpdatePartitionInfoRpcCall(Collections.singleton(partitionInfo));
}
}
}
}
private static PartitionInfo createPartitionInfo(ExecutionEdge executionEdge) {
IntermediateDataSetID intermediateDataSetID = executionEdge.getSource().getIntermediateResult().getId();
ShuffleDescriptor shuffleDescriptor = getConsumedPartitionShuffleDescriptor(executionEdge, false);
return new PartitionInfo(intermediateDataSetID, shuffleDescriptor);
}
/**
* This method fails the vertex due to an external condition. The task will move to state FAILED.
* If the task was in state RUNNING or DEPLOYING before, it will send a cancel call to the TaskManager.
*
* @param t The exception that caused the task to fail.
*/
@Override
public void fail(Throwable t) {
processFail(t, false);
}
/**
* Request a stack trace sample from the task of this execution.
*
* @param sampleId of the stack trace sample
* @param numSamples the sample should contain
* @param delayBetweenSamples to wait
* @param maxStackTraceDepth of the samples
* @param timeout until the request times out
* @return Future stack trace sample response
*/
public CompletableFuture<StackTraceSampleResponse> requestStackTraceSample(
int sampleId,
int numSamples,
Time delayBetweenSamples,
int maxStackTraceDepth,
Time timeout) {
final LogicalSlot slot = assignedResource;
if (slot != null) {
final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
return taskManagerGateway.requestStackTraceSample(
attemptId,
sampleId,
numSamples,
delayBetweenSamples,
maxStackTraceDepth,
timeout);
} else {
return FutureUtils.completedExceptionally(new Exception("The execution has no slot assigned."));
}
}
/**
* Notify the task of this execution about a completed checkpoint.
*
* @param checkpointId of the completed checkpoint
* @param timestamp of the completed checkpoint
*/
public void notifyCheckpointComplete(long checkpointId, long timestamp) {
final LogicalSlot slot = assignedResource;
if (slot != null) {
final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
taskManagerGateway.notifyCheckpointComplete(attemptId, getVertex().getJobId(), checkpointId, timestamp);
} else {
LOG.debug("The execution has no slot assigned. This indicates that the execution is " +
"no longer running.");
}
}
/**
* Trigger a new checkpoint on the task of this execution.
*
* @param checkpointId of th checkpoint to trigger
* @param timestamp of the checkpoint to trigger
* @param checkpointOptions of the checkpoint to trigger
*/
public void triggerCheckpoint(long checkpointId, long timestamp, CheckpointOptions checkpointOptions) {
triggerCheckpointHelper(checkpointId, timestamp, checkpointOptions, false);
}
/**
* Trigger a new checkpoint on the task of this execution.
*
* @param checkpointId of th checkpoint to trigger
* @param timestamp of the checkpoint to trigger
* @param checkpointOptions of the checkpoint to trigger
* @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline
* to fire any registered event-time timers
*/
public void triggerSynchronousSavepoint(long checkpointId, long timestamp, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) {
triggerCheckpointHelper(checkpointId, timestamp, checkpointOptions, advanceToEndOfEventTime);
}
private void triggerCheckpointHelper(long checkpointId, long timestamp, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) {
final CheckpointType checkpointType = checkpointOptions.getCheckpointType();
if (advanceToEndOfEventTime && !(checkpointType.isSynchronous() && checkpointType.isSavepoint())) {
throw new IllegalArgumentException("Only synchronous savepoints are allowed to advance the watermark to MAX.");
}
final LogicalSlot slot = assignedResource;
if (slot != null) {
final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
taskManagerGateway.triggerCheckpoint(attemptId, getVertex().getJobId(), checkpointId, timestamp, checkpointOptions, advanceToEndOfEventTime);
} else {
LOG.debug("The execution has no slot assigned. This indicates that the execution is no longer running.");
}
}
/**
* This method marks the task as failed, but will make no attempt to remove task execution from the task manager.
* It is intended for cases where the task is known not to be running, or then the TaskManager reports failure
* (in which case it has already removed the task).
*
* @param t The exception that caused the task to fail.
*/
void markFailed(Throwable t) {
processFail(t, true);
}
void markFailed(Throwable t, Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
processFail(t, true, userAccumulators, metrics);
}
void markFinished() {
markFinished(null, null);
}
private boolean cancelAtomically() {
if (startCancelling(0)) {
completeCancelling();
return true;
} else {
return false;
}
}
private boolean startCancelling(int numberCancelRetries) {
if (transitionState(state, CANCELING)) {
taskManagerLocationFuture.cancel(false);
sendCancelRpcCall(numberCancelRetries);
return true;
} else {
return false;
}
}
void completeCancelling() {
completeCancelling(null, null);
}
/**
 * Transitions this execution to CANCELED, retrying the CAS until it either wins or
 * observes a terminal/racing state. Records the supplied final accumulators/metrics.
 *
 * @param userAccumulators final user-defined accumulator values, may be null
 * @param metrics final IO metrics, may be null
 */
void completeCancelling(Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
    // Retry loop: transitionState may lose a CAS race, in which case we re-read the state.
    while (true) {
        ExecutionState current = this.state;

        if (current == CANCELED) {
            // someone else already completed the cancellation
            return;
        }
        else if (current == CANCELING || current == RUNNING || current == DEPLOYING) {
            updateAccumulatorsAndMetrics(userAccumulators, metrics);

            if (transitionState(current, CANCELED)) {
                finishCancellation();
                return;
            }
            // CAS lost: loop and re-evaluate the new state
        }
        else {
            // Unexpected state after a successful cancel call; FAILED can happen
            // legitimately (concurrent failure), anything else is a bug.
            if (current != FAILED) {
                String message = String.format("Asynchronous race: Found %s in state %s after successful cancel call.", vertex.getTaskNameWithSubtaskIndex(), state);
                LOG.error(message);
                vertex.getExecutionGraph().failGlobal(new Exception(message));
            }
            return;
        }
    }
}
/** Releases the slot and removes this execution from the graph's registry after cancellation. */
private void finishCancellation() {
    releaseAssignedResource(new FlinkException("Execution " + this + " was cancelled."));
    vertex.getExecutionGraph().deregisterExecution(this);
}

/**
 * Caches a partition info update to be shipped to the TaskManager later
 * (see {@code sendPartitionInfos()}).
 *
 * @param partitionInfo the partition info to cache
 */
void cachePartitionInfo(PartitionInfo partitionInfo) {
    partitionInfos.add(partitionInfo);
}
/**
 * Ships all cached partition infos to the TaskManager in a single update RPC
 * and clears the cache. No-op when nothing is cached.
 */
private void sendPartitionInfos() {
    if (partitionInfos.isEmpty()) {
        return;
    }
    // Ship a snapshot so the RPC payload is independent of the cache we clear below.
    final ArrayList<PartitionInfo> snapshot = new ArrayList<>(partitionInfos);
    sendUpdatePartitionInfoRpcCall(snapshot);
    partitionInfos.clear();
}
/**
 * Fails this execution without recording accumulators or metrics.
 *
 * @param t cause of the failure
 * @param isCallback true if the failure was reported externally (no cancel RPC needed)
 * @return true if this call performed the transition to FAILED
 */
private boolean processFail(Throwable t, boolean isCallback) {
    return processFail(t, isCallback, null, null);
}
/**
 * Core failure handling: transitions this execution to FAILED (retrying the CAS),
 * records the failure cause and final accumulators/metrics, releases the slot and
 * deregisters the execution. If the failure was detected locally (not a callback)
 * while the task was RUNNING/DEPLOYING, a cancel RPC is additionally sent so the
 * TaskManager removes the task.
 *
 * @param t cause of the failure
 * @param isCallback true if the failure was reported by the TaskManager (task already removed there)
 * @param userAccumulators final user-defined accumulator values, may be null
 * @param metrics final IO metrics, may be null
 * @return true if this call performed the transition to FAILED, false if the
 *         execution was already terminal or is being cancelled
 */
private boolean processFail(Throwable t, boolean isCallback, Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
    assertRunningInJobMasterMainThread();

    // Retry loop around the state CAS.
    while (true) {
        ExecutionState current = this.state;

        if (current == FAILED) {
            // already failed: nothing to do
            return false;
        }

        if (current == CANCELED || current == FINISHED) {
            // terminal states other than FAILED ignore late failure reports
            if (LOG.isDebugEnabled()) {
                LOG.debug("Ignoring transition of vertex {} to {} while being {}.", getVertexWithAttempt(), FAILED, current);
            }
            return false;
        }

        if (current == CANCELING) {
            // a cancel is in flight; treat the failure as completion of that cancel
            completeCancelling(userAccumulators, metrics);
            return false;
        }

        if (transitionState(current, FAILED, t)) {
            // success (in a manner of speaking)
            this.failureCause = t;

            updateAccumulatorsAndMetrics(userAccumulators, metrics);

            releaseAssignedResource(t);

            vertex.getExecutionGraph().deregisterExecution(this);

            if (!isCallback && (current == RUNNING || current == DEPLOYING)) {
                // locally detected failure of a deployed task: tell the TM to drop it
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Sending out cancel request, to remove task execution from TaskManager.");
                }

                try {
                    if (assignedResource != null) {
                        sendCancelRpcCall(NUM_CANCEL_CALL_TRIES);
                    }
                } catch (Throwable tt) {
                    // no reason this should ever happen, but log it in case it does
                    LOG.error("Error triggering cancel call while marking task {} as failed.", getVertex().getTaskNameWithSubtaskIndex(), tt);
                }
            }

            return true;
        }
        // CAS lost against a concurrent transition: loop and re-read the state
    }
}
/**
 * Transitions this execution from DEPLOYING to RUNNING (typically on the TaskManager's
 * "task running" notification) and ships any cached partition infos. If the transition
 * fails, handles the concurrent state the execution raced into.
 *
 * @return true if the transition to RUNNING succeeded, false otherwise
 */
boolean switchToRunning() {
    if (transitionState(DEPLOYING, RUNNING)) {
        sendPartitionInfos();
        return true;
    }
    else {
        // the task was not ready to switch: figure out what happened concurrently
        ExecutionState currentState = this.state;

        if (currentState == FINISHED || currentState == CANCELED) {
            // fine: the task finished or was cancelled while we were deploying
        }
        else if (currentState == CANCELING || currentState == FAILED) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Concurrent canceling/failing of {} while deployment was in progress.", getVertexWithAttempt());
            }
            // make sure the TaskManager removes the (now unwanted) task
            sendCancelRpcCall(NUM_CANCEL_CALL_TRIES);
        }
        else {
            // any other state here indicates a bug; cancel the task and fail the execution
            String message = String.format("Concurrent unexpected state transition of task %s to %s while deployment was in progress.",
                getVertexWithAttempt(), currentState);

            if (LOG.isDebugEnabled()) {
                LOG.debug(message);
            }

            sendCancelRpcCall(NUM_CANCEL_CALL_TRIES);

            markFailed(new Exception(message));
        }

        return false;
    }
}
/**
 * This method sends a CancelTask message to the instance of the assigned slot.
 *
 * <p>The sending is tried up to NUM_CANCEL_CALL_TRIES times.
 *
 * @param numberRetries number of times the cancel RPC is retried on failure
 */
private void sendCancelRpcCall(int numberRetries) {
    final LogicalSlot slot = assignedResource;

    if (slot != null) {
        final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
        final ComponentMainThreadExecutor jobMasterMainThreadExecutor =
            getVertex().getExecutionGraph().getJobMasterMainThreadExecutor();

        // retry the cancel RPC on the main-thread executor
        CompletableFuture<Acknowledge> cancelResultFuture = FutureUtils.retry(
            () -> taskManagerGateway.cancelTask(attemptId, rpcTimeout),
            numberRetries,
            jobMasterMainThreadExecutor);

        cancelResultFuture.whenComplete(
            (ack, failure) -> {
                if (failure != null) {
                    // if the cancel call cannot be delivered, fail the execution hard
                    fail(new Exception("Task could not be canceled.", failure));
                }
            });
    }
    // no slot assigned: the task is not running anywhere, nothing to cancel
}
/**
 * Asks the TaskManager hosting this execution to release all result partitions
 * produced by this attempt. No-op when no slot is assigned or no partitions exist.
 */
private void sendReleaseIntermediateResultPartitionsRpcCall() {
    LOG.info("Discarding the results produced by task execution {}.", attemptId);

    final LogicalSlot slot = assignedResource;
    if (slot == null) {
        return;
    }

    // Build the globally unique partition ids (partition id + this attempt's id).
    final List<ResultPartitionID> partitionIds = vertex.getProducedPartitions().values().stream()
        .map(partition -> new ResultPartitionID(partition.getPartitionId(), attemptId))
        .collect(Collectors.toList());

    if (!partitionIds.isEmpty()) {
        slot.getTaskManagerGateway().releasePartitions(getVertex().getJobId(), partitionIds);
    }
}
/**
 * Update the partition infos on the assigned resource.
 *
 * @param partitionInfos for the remote task
 */
private void sendUpdatePartitionInfoRpcCall(
        final Iterable<PartitionInfo> partitionInfos) {

    final LogicalSlot slot = assignedResource;

    if (slot != null) {
        final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
        final TaskManagerLocation taskManagerLocation = slot.getTaskManagerLocation();

        CompletableFuture<Acknowledge> updatePartitionsResultFuture = taskManagerGateway.updatePartitions(attemptId, partitionInfos, rpcTimeout);

        // react to update failures on the job master main thread
        updatePartitionsResultFuture.whenCompleteAsync(
            (ack, failure) -> {
                // fail if the partition update fails
                if (failure != null) {
                    fail(new IllegalStateException("Update task on TaskManager " + taskManagerLocation +
                        " failed due to:", failure));
                }
            }, getVertex().getExecutionGraph().getJobMasterMainThreadExecutor());
    }
}
/**
 * Releases the assigned resource and completes the release future
 * once the assigned resource has been successfully released.
 *
 * @param cause for the resource release, null if none
 */
private void releaseAssignedResource(@Nullable Throwable cause) {
    assertRunningInJobMasterMainThread();

    final LogicalSlot slot = assignedResource;

    if (slot != null) {
        ComponentMainThreadExecutor jobMasterMainThreadExecutor =
            getVertex().getExecutionGraph().getJobMasterMainThreadExecutor();

        slot.releaseSlot(cause)
            .whenComplete((Object ignored, Throwable throwable) -> {
                // the callback must run on the main thread; fail fast otherwise
                jobMasterMainThreadExecutor.assertRunningInMainThread();
                if (throwable != null) {
                    releaseFuture.completeExceptionally(throwable);
                } else {
                    releaseFuture.complete(null);
                }
            });
    } else {
        // no assigned resource: nothing to release, complete immediately
        releaseFuture.complete(null);
    }
}
/**
 * Calculates the preferred locations based on the location preference constraint.
 *
 * @param locationPreferenceConstraint constraint for the location preference
 * @return Future containing the collection of preferred locations. This might not be completed if not all inputs
 *         have been a resource assigned.
 */
@VisibleForTesting
public CompletableFuture<Collection<TaskManagerLocation>> calculatePreferredLocations(LocationPreferenceConstraint locationPreferenceConstraint) {
    final Collection<CompletableFuture<TaskManagerLocation>> preferredLocationFutures = getVertex().getPreferredLocations();
    final CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture;

    switch(locationPreferenceConstraint) {
        case ALL:
            // wait until every input location is known
            preferredLocationsFuture = FutureUtils.combineAll(preferredLocationFutures);
            break;
        case ANY:
            // use only the locations that are already known right now
            final ArrayList<TaskManagerLocation> completedTaskManagerLocations = new ArrayList<>(preferredLocationFutures.size());

            for (CompletableFuture<TaskManagerLocation> preferredLocationFuture : preferredLocationFutures) {
                if (preferredLocationFuture.isDone() && !preferredLocationFuture.isCompletedExceptionally()) {
                    final TaskManagerLocation taskManagerLocation = preferredLocationFuture.getNow(null);

                    if (taskManagerLocation == null) {
                        // getNow(null) on a completed, non-exceptional future must not yield null
                        throw new FlinkRuntimeException("TaskManagerLocationFuture was completed with null. This indicates a programming bug.");
                    }

                    completedTaskManagerLocations.add(taskManagerLocation);
                }
            }

            preferredLocationsFuture = CompletableFuture.completedFuture(completedTaskManagerLocations);
            break;
        default:
            throw new RuntimeException("Unknown LocationPreferenceConstraint " + locationPreferenceConstraint + '.');
    }

    return preferredLocationsFuture;
}
/**
 * Transitions the state without an error cause.
 *
 * @return true if the CAS from {@code currentState} to {@code targetState} succeeded
 */
private boolean transitionState(ExecutionState currentState, ExecutionState targetState) {
    return transitionState(currentState, targetState, null);
}
/**
 * Atomically transitions this execution from {@code currentState} to {@code targetState}
 * via CAS. On success: records the transition timestamp, logs it, completes the
 * terminal-state future if the target is terminal, and notifies the vertex.
 *
 * @param currentState expected current state (must not be terminal)
 * @param targetState state to transition into
 * @param error optional error that caused the transition, may be null
 * @return true if the CAS succeeded, false if another transition won the race
 * @throws IllegalStateException if {@code currentState} is already terminal
 */
private boolean transitionState(ExecutionState currentState, ExecutionState targetState, Throwable error) {
    // sanity check: terminal states must never be left
    if (currentState.isTerminal()) {
        throw new IllegalStateException("Cannot leave terminal state " + currentState + " to transition to " + targetState + '.');
    }

    if (STATE_UPDATER.compareAndSet(this, currentState, targetState)) {
        markTimestamp(targetState);

        if (error == null) {
            LOG.info("{} ({}) switched from {} to {}.", getVertex().getTaskNameWithSubtaskIndex(), getAttemptId(), currentState, targetState);
        } else {
            LOG.info("{} ({}) switched from {} to {}.", getVertex().getTaskNameWithSubtaskIndex(), getAttemptId(), currentState, targetState, error);
        }

        if (targetState.isTerminal()) {
            // complete the terminal state future for waiters
            terminalStateFuture.complete(targetState);
        }

        // make sure that the state transition completes normally.
        // potential errors (in listeners may not affect the main logic)
        try {
            vertex.notifyStateTransition(this, targetState, error);
        }
        catch (Throwable t) {
            LOG.error("Error while notifying execution graph of execution state transition.", t);
        }
        return true;
    } else {
        return false;
    }
}
/** Records the current wall-clock time as the timestamp for entering {@code state}. */
private void markTimestamp(ExecutionState state) {
    markTimestamp(state, System.currentTimeMillis());
}

/** Records {@code timestamp} for entering {@code state}, indexed by the state's ordinal. */
private void markTimestamp(ExecutionState state, long timestamp) {
    this.stateTimestamps[state.ordinal()] = timestamp;
}
public String getVertexWithAttempt() {
return vertex.getTaskNameWithSubtaskIndex() + " - execution
}
/**
 * Update accumulators (discarded when the Execution has already been terminated).
 * @param userAccumulators the user accumulators
 */
public void setAccumulators(Map<String, Accumulator<?, ?>> userAccumulators) {
    // lock prevents a late partial heartbeat from overwriting final accumulators
    synchronized (accumulatorLock) {
        if (!state.isTerminal()) {
            this.userAccumulators = userAccumulators;
        }
    }
}

/** Returns the current user-defined accumulators; may be null if none were reported yet. */
public Map<String, Accumulator<?, ?>> getUserAccumulators() {
    return userAccumulators;
}
/**
 * Returns the user-defined accumulators as stringified results, wrapping each
 * value in a successful {@code OptionalFailure}.
 *
 * @return stringified accumulator results (empty result set if none were reported)
 */
@Override
public StringifiedAccumulatorResult[] getUserAccumulatorsStringified() {
    // Read the volatile field once so we work on a consistent reference.
    final Map<String, Accumulator<?, ?>> current = userAccumulators;

    final Map<String, OptionalFailure<Accumulator<?, ?>>> wrapped;
    if (current == null) {
        wrapped = null;
    } else {
        wrapped = current.entrySet()
            .stream()
            .collect(Collectors.toMap(Map.Entry::getKey, entry -> OptionalFailure.of(entry.getValue())));
    }

    return StringifiedAccumulatorResult.stringifyAccumulatorResults(wrapped);
}
/** Returns the subtask index of the vertex this execution belongs to. */
@Override
public int getParallelSubtaskIndex() {
    return getVertex().getParallelSubtaskIndex();
}

/** Returns the IO metrics reported for this execution; may be null before completion. */
@Override
public IOMetrics getIOMetrics() {
    return ioMetrics;
}
/**
 * Stores the final accumulators and IO metrics of this execution; either argument
 * may be null, in which case the corresponding field is left unchanged.
 *
 * @param userAccumulators final user-defined accumulators, may be null
 * @param metrics final IO metrics, may be null
 */
private void updateAccumulatorsAndMetrics(Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
    if (metrics != null) {
        this.ioMetrics = metrics;
    }
    if (userAccumulators != null) {
        // guard against a late heartbeat overwriting these final values
        synchronized (accumulatorLock) {
            this.userAccumulators = userAccumulators;
        }
    }
}
@Override
public String toString() {
final LogicalSlot slot = assignedResource;
return String.format("Attempt
(slot == null ? "(unassigned)" : slot), state);
}
/** Creates an immutable, serializable snapshot of this execution for archiving. */
@Override
public ArchivedExecution archive() {
    return new ArchivedExecution(this);
}

/** Asserts that the caller runs on the job master's main thread. */
private void assertRunningInJobMasterMainThread() {
    vertex.getExecutionGraph().assertRunningInJobMasterMainThread();
}
} | class Execution implements AccessExecution, Archiveable<ArchivedExecution>, LogicalSlot.Payload {
// Atomic updater for the volatile 'state' field (lock-free state machine transitions).
private static final AtomicReferenceFieldUpdater<Execution, ExecutionState> STATE_UPDATER =
    AtomicReferenceFieldUpdater.newUpdater(Execution.class, ExecutionState.class, "state");

// Atomic updater for the volatile 'assignedResource' field.
private static final AtomicReferenceFieldUpdater<Execution, LogicalSlot> ASSIGNED_SLOT_UPDATER = AtomicReferenceFieldUpdater.newUpdater(
    Execution.class,
    LogicalSlot.class,
    "assignedResource");

private static final Logger LOG = ExecutionGraph.LOG;

// How often cancel/stop RPCs are retried before the execution is failed.
private static final int NUM_CANCEL_CALL_TRIES = 3;

private static final int NUM_STOP_CALL_TRIES = 3;

/** The executor which is used to execute futures. */
private final Executor executor;

/** The execution vertex whose task this execution executes. */
private final ExecutionVertex vertex;

/** The unique ID marking the specific execution instant of the task. */
private final ExecutionAttemptID attemptId;

/** Gets the global modification version of the execution graph when this execution was created.
 * This version is bumped in the ExecutionGraph whenever a global failover happens. It is used
 * to resolve conflicts between concurrent modification by global and local failover actions. */
private final long globalModVersion;

/** The timestamps when state transitions occurred, indexed by {@link ExecutionState} ordinal. */
private final long[] stateTimestamps;

private final int attemptNumber;

// Timeout for deploy/cancel/stop RPC calls.
private final Time rpcTimeout;

// Partition infos cached until the consumer task is running (see sendPartitionInfos()).
private final Collection<PartitionInfo> partitionInfos;

/** A future that completes once the Execution reaches a terminal ExecutionState. */
private final CompletableFuture<ExecutionState> terminalStateFuture;

// Completed once the execution is terminal AND its slot has been released.
private final CompletableFuture<?> releaseFuture;

// Completed with the TaskManager location once a slot has been assigned.
private final CompletableFuture<TaskManagerLocation> taskManagerLocationFuture;

private volatile ExecutionState state = CREATED;

private volatile LogicalSlot assignedResource;

private volatile Throwable failureCause;

/** Information to restore the task on recovery, such as checkpoint id and task state snapshot. */
@Nullable
private volatile JobManagerTaskRestore taskRestore;

/** This field holds the allocation id once it was assigned successfully. */
@Nullable
private volatile AllocationID assignedAllocationID;

/** Lock for updating the accumulators atomically.
 * Prevents final accumulators to be overwritten by partial accumulators on a late heartbeat. */
private final Object accumulatorLock = new Object();

/* Continuously updated map of user-defined accumulators */
private volatile Map<String, Accumulator<?, ?>> userAccumulators;

private volatile IOMetrics ioMetrics;

// Deployment descriptors of the partitions this attempt produces, keyed by partition id.
private Map<IntermediateResultPartitionID, ResultPartitionDeploymentDescriptor> producedPartitions;
/**
 * Creates a new Execution attempt.
 *
 * @param executor
 *             The executor used to dispatch callbacks from futures and asynchronous RPC calls.
 * @param vertex
 *             The execution vertex to which this Execution belongs
 * @param attemptNumber
 *             The execution attempt number.
 * @param globalModVersion
 *             The global modification version of the execution graph when this execution was created
 * @param startTimestamp
 *             The timestamp that marks the creation of this Execution
 * @param rpcTimeout
 *             The rpcTimeout for RPC calls like deploy/cancel/stop.
 */
public Execution(
        Executor executor,
        ExecutionVertex vertex,
        int attemptNumber,
        long globalModVersion,
        long startTimestamp,
        Time rpcTimeout) {

    this.executor = checkNotNull(executor);
    this.vertex = checkNotNull(vertex);
    this.attemptId = new ExecutionAttemptID();
    this.rpcTimeout = checkNotNull(rpcTimeout);

    this.globalModVersion = globalModVersion;
    this.attemptNumber = attemptNumber;

    this.stateTimestamps = new long[ExecutionState.values().length];
    // record the creation time as the CREATED-state timestamp
    markTimestamp(CREATED, startTimestamp);

    this.partitionInfos = new ArrayList<>(16);
    this.producedPartitions = Collections.emptyMap();
    this.terminalStateFuture = new CompletableFuture<>();
    this.releaseFuture = new CompletableFuture<>();
    this.taskManagerLocationFuture = new CompletableFuture<>();

    this.assignedResource = null;
}
/** Returns the execution vertex whose task this execution executes. */
public ExecutionVertex getVertex() {
    return vertex;
}

/** Returns the unique id of this execution attempt. */
@Override
public ExecutionAttemptID getAttemptId() {
    return attemptId;
}

/** Returns the attempt number of this execution. */
@Override
public int getAttemptNumber() {
    return attemptNumber;
}

/** Returns the current execution state (volatile read). */
@Override
public ExecutionState getState() {
    return state;
}

/** Returns the allocation id of the assigned slot, or null if none was assigned yet. */
@Nullable
public AllocationID getAssignedAllocationID() {
    return assignedAllocationID;
}

/**
 * Gets the global modification version of the execution graph when this execution was created.
 *
 * <p>This version is bumped in the ExecutionGraph whenever a global failover happens. It is used
 * to resolve conflicts between concurrent modification by global and local failover actions.
 */
public long getGlobalModVersion() {
    return globalModVersion;
}

/** Returns the future that completes with the TaskManager location once a slot is assigned. */
public CompletableFuture<TaskManagerLocation> getTaskManagerLocationFuture() {
    return taskManagerLocationFuture;
}

/** Returns the currently assigned slot, or null if none is assigned. */
public LogicalSlot getAssignedResource() {
    return assignedResource;
}

/** Returns the deployment descriptor of the produced partition with the given id, if registered. */
public Optional<ResultPartitionDeploymentDescriptor> getResultPartitionDeploymentDescriptor(
        IntermediateResultPartitionID id) {
    return Optional.ofNullable(producedPartitions.get(id));
}
/**
 * Tries to assign the given slot to the execution. The assignment works only if the
 * Execution is in state SCHEDULED. Returns true, if the resource could be assigned.
 *
 * @param logicalSlot to assign to this execution
 * @return true if the slot could be assigned to the execution, otherwise false
 */
@VisibleForTesting
boolean tryAssignResource(final LogicalSlot logicalSlot) {

    assertRunningInJobMasterMainThread();

    checkNotNull(logicalSlot);

    // only allow to set the assigned resource in state SCHEDULED or CREATED
    // note: we also accept resource assignment when being in state CREATED for testing purposes
    if (state == SCHEDULED || state == CREATED) {
        // CAS: only succeed if no slot was assigned before
        if (ASSIGNED_SLOT_UPDATER.compareAndSet(this, null, logicalSlot)) {
            if (logicalSlot.tryAssignPayload(this)) {
                // re-check the state: it may have changed concurrently between the CAS and here
                if ((state == SCHEDULED || state == CREATED) && !taskManagerLocationFuture.isDone()) {
                    taskManagerLocationFuture.complete(logicalSlot.getTaskManagerLocation());
                    assignedAllocationID = logicalSlot.getAllocationId();
                    return true;
                } else {
                    // roll back the assignment
                    ASSIGNED_SLOT_UPDATER.set(this, null);
                    return false;
                }
            } else {
                // the slot rejected us as payload; roll back the assignment
                ASSIGNED_SLOT_UPDATER.set(this, null);
                return false;
            }
        } else {
            // a slot was already assigned
            return false;
        }
    } else {
        // do not allow resource assignment if we are not in state SCHEDULED
        return false;
    }
}
/**
 * Requests the next input split for this execution's vertex, using the hostname of
 * the assigned TaskManager (if any) as a locality hint.
 *
 * @return the next input split, or null if none remain
 */
public InputSplit getNextInputSplit() {
    final LogicalSlot slot = getAssignedResource();

    String hostname = null;
    if (slot != null) {
        hostname = slot.getTaskManagerLocation().getHostname();
    }

    return vertex.getNextInputSplit(hostname);
}
/** Returns the location of the assigned TaskManager, or null if no slot is assigned. */
@Override
public TaskManagerLocation getAssignedResourceLocation() {
    // returns non-null only when a location is already assigned
    final LogicalSlot currentAssignedResource = assignedResource;
    return currentAssignedResource != null ? currentAssignedResource.getTaskManagerLocation() : null;
}

/** Returns the throwable that caused this execution to fail, or null if it did not fail. */
public Throwable getFailureCause() {
    return failureCause;
}

/** Returns the stringified failure cause (stack trace), or a placeholder if none. */
@Override
public String getFailureCauseAsString() {
    return ExceptionUtils.stringifyException(getFailureCause());
}

/** Returns the raw state-transition timestamps, indexed by {@code ExecutionState} ordinal. */
@Override
public long[] getStateTimestamps() {
    return stateTimestamps;
}

/** Returns the timestamp at which the given state was entered (0 if never entered). */
@Override
public long getStateTimestamp(ExecutionState state) {
    return this.stateTimestamps[state.ordinal()];
}

/** Returns true once the execution has reached a terminal state. */
public boolean isFinished() {
    return state.isTerminal();
}

/** Returns the task restore information for recovery, or null once it was consumed by deploy(). */
@Nullable
public JobManagerTaskRestore getTaskRestore() {
    return taskRestore;
}
/**
 * Sets the initial state for the execution. The serialized state is then shipped via the
 * {@link TaskDeploymentDescriptor} to the TaskManagers.
 *
 * @param taskRestore information to restore the state
 */
public void setInitialState(@Nullable JobManagerTaskRestore taskRestore) {
    this.taskRestore = taskRestore;
}
/**
 * Gets a future that completes once the task execution reaches a terminal state.
 * The future will be completed with specific state that the execution reached.
 * This future is always completed from the job master's main thread.
 *
 * @return A future which is completed once the execution reaches a terminal state
 */
@Override
public CompletableFuture<ExecutionState> getTerminalStateFuture() {
    return terminalStateFuture;
}

/**
 * Gets the release future which is completed once the execution reaches a terminal
 * state and the assigned resource has been released.
 * This future is always completed from the job master's main thread.
 *
 * @return A future which is completed once the assigned resource has been released
 */
public CompletableFuture<?> getReleaseFuture() {
    return releaseFuture;
}
/**
 * Schedules this execution using the execution graph's own slot provider and
 * queued-scheduling setting, with the ANY location preference and no prior
 * allocation ids.
 *
 * @return future which is completed once the execution has been deployed
 */
public CompletableFuture<Void> scheduleForExecution() {
    final ExecutionGraph graph = getVertex().getExecutionGraph();
    return scheduleForExecution(
        graph.getSlotProvider(),
        graph.isQueuedSchedulingAllowed(),
        LocationPreferenceConstraint.ANY,
        Collections.emptySet());
}
/**
 * NOTE: This method only throws exceptions if it is in an illegal state to be scheduled, or if the tasks needs
 * to be scheduled immediately and no resource is available. If the task is accepted by the schedule, any
 * error sets the vertex state to failed and triggers the recovery logic.
 *
 * @param slotProvider The slot provider to use to allocate slot for this execution attempt.
 * @param queued Flag to indicate whether the scheduler may queue this task if it cannot
 *               immediately deploy it.
 * @param locationPreferenceConstraint constraint for the location preferences
 * @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph.
 *                                               Can be empty if the allocation ids are not required for scheduling.
 * @return Future which is completed once the Execution has been deployed
 */
public CompletableFuture<Void> scheduleForExecution(
        SlotProvider slotProvider,
        boolean queued,
        LocationPreferenceConstraint locationPreferenceConstraint,
        @Nonnull Set<AllocationID> allPreviousExecutionGraphAllocationIds) {

    assertRunningInJobMasterMainThread();

    final ExecutionGraph executionGraph = vertex.getExecutionGraph();
    final Time allocationTimeout = executionGraph.getAllocationTimeout();
    try {
        // first allocate the slot (and register partitions), then deploy
        final CompletableFuture<Execution> allocationFuture = allocateResourcesForExecution(
            slotProvider,
            queued,
            locationPreferenceConstraint,
            allPreviousExecutionGraphAllocationIds,
            allocationTimeout);

        final CompletableFuture<Void> deploymentFuture;

        if (allocationFuture.isDone() || queued) {
            deploymentFuture = allocationFuture.thenRun(ThrowingRunnable.unchecked(this::deploy));
        } else {
            // non-queued scheduling requires the slot to be available immediately
            deploymentFuture = FutureUtils.completedExceptionally(
                new IllegalArgumentException("The slot allocation future has not been completed yet."));
        }

        deploymentFuture.whenComplete(
            (Void ignored, Throwable failure) -> {
                if (failure != null) {
                    final Throwable stripCompletionException = ExceptionUtils.stripCompletionException(failure);
                    final Throwable schedulingFailureCause;

                    // map allocation timeouts to a user-facing "no resources" error
                    if (stripCompletionException instanceof TimeoutException) {
                        schedulingFailureCause = new NoResourceAvailableException(
                            "Could not allocate enough slots within timeout of " + allocationTimeout + " to run the job. " +
                                "Please make sure that the cluster has enough resources.");
                    } else {
                        schedulingFailureCause = stripCompletionException;
                    }

                    markFailed(schedulingFailureCause);
                }
            });

        return deploymentFuture;
    } catch (IllegalExecutionStateException e) {
        return FutureUtils.completedExceptionally(e);
    }
}
/**
 * Allocates resources for the execution.
 *
 * <p>Allocates following resources:
 * <ol>
 *  <li>slot obtained from the slot provider</li>
 *  <li>registers produced partitions with the {@link org.apache.flink.runtime.shuffle.ShuffleMaster}</li>
 * </ol>
 *
 * @param slotProvider to obtain a new slot from
 * @param queued if the allocation can be queued
 * @param locationPreferenceConstraint constraint for the location preferences
 * @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph.
 *                                               Can be empty if the allocation ids are not required for scheduling.
 * @param allocationTimeout rpcTimeout for allocating a new slot
 * @return Future which is completed with this execution once the slot has been assigned
 *         or with an exception if an error occurred.
 */
CompletableFuture<Execution> allocateResourcesForExecution(
        SlotProvider slotProvider,
        boolean queued,
        LocationPreferenceConstraint locationPreferenceConstraint,
        @Nonnull Set<AllocationID> allPreviousExecutionGraphAllocationIds,
        Time allocationTimeout) {
    // step 1: slot allocation; step 2: partition registration with the shuffle master
    return allocateAndAssignSlotForExecution(
        slotProvider,
        queued,
        locationPreferenceConstraint,
        allPreviousExecutionGraphAllocationIds,
        allocationTimeout)
        .thenCompose(slot -> registerProducedPartitions(slot.getTaskManagerLocation()));
}
/**
 * Allocates and assigns a slot obtained from the slot provider to the execution.
 *
 * @param slotProvider to obtain a new slot from
 * @param queued if the allocation can be queued
 * @param locationPreferenceConstraint constraint for the location preferences
 * @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph.
 *                                               Can be empty if the allocation ids are not required for scheduling.
 * @param allocationTimeout rpcTimeout for allocating a new slot
 * @return Future which is completed with the allocated slot once it has been assigned
 *         or with an exception if an error occurred.
 */
private CompletableFuture<LogicalSlot> allocateAndAssignSlotForExecution(
        SlotProvider slotProvider,
        boolean queued,
        LocationPreferenceConstraint locationPreferenceConstraint,
        @Nonnull Set<AllocationID> allPreviousExecutionGraphAllocationIds,
        Time allocationTimeout) {

    checkNotNull(slotProvider);

    assertRunningInJobMasterMainThread();

    final SlotSharingGroup sharingGroup = vertex.getJobVertex().getSlotSharingGroup();
    final CoLocationConstraint locationConstraint = vertex.getLocationConstraint();

    // sanity check: co-location is only supported together with slot sharing
    if (locationConstraint != null && sharingGroup == null) {
        throw new IllegalStateException(
            "Trying to schedule with co-location constraint but without slot sharing allowed.");
    }

    // this method only works if the execution is in the state 'CREATED'
    if (transitionState(CREATED, SCHEDULED)) {

        final SlotSharingGroupId slotSharingGroupId = sharingGroup != null ? sharingGroup.getSlotSharingGroupId() : null;

        ScheduledUnit toSchedule = locationConstraint == null ?
            new ScheduledUnit(this, slotSharingGroupId) :
            new ScheduledUnit(this, slotSharingGroupId, locationConstraint);

        // try to extract previous allocation ids, if applicable, so that we can reschedule to the same slot
        ExecutionVertex executionVertex = getVertex();
        AllocationID lastAllocation = executionVertex.getLatestPriorAllocation();

        Collection<AllocationID> previousAllocationIDs =
            lastAllocation != null ? Collections.singletonList(lastAllocation) : Collections.emptyList();

        // calculate the preferred locations
        final CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture =
            calculatePreferredLocations(locationPreferenceConstraint);

        final SlotRequestId slotRequestId = new SlotRequestId();

        final CompletableFuture<LogicalSlot> logicalSlotFuture =
            preferredLocationsFuture.thenCompose(
                (Collection<TaskManagerLocation> preferredLocations) ->
                    slotProvider.allocateSlot(
                        slotRequestId,
                        toSchedule,
                        new SlotProfile(
                            ResourceProfile.UNKNOWN,
                            preferredLocations,
                            previousAllocationIDs,
                            allPreviousExecutionGraphAllocationIds),
                        queued,
                        allocationTimeout));

        // if the execution is released while the slot request is pending, cancel the request
        releaseFuture.whenComplete(
            (Object ignored, Throwable throwable) -> {
                if (logicalSlotFuture.cancel(false)) {
                    slotProvider.cancelSlotRequest(
                        slotRequestId,
                        slotSharingGroupId,
                        new FlinkException("Execution " + this + " was released."));
                }
            });

        return logicalSlotFuture.handle(
            (LogicalSlot logicalSlot, Throwable failure) -> {

                if (failure != null) {
                    throw new CompletionException(failure);
                }

                if (tryAssignResource(logicalSlot)) {
                    return logicalSlot;
                } else {
                    // release the slot immediately if we could not bind it to this execution
                    logicalSlot.releaseSlot(new FlinkException("Could not assign logical slot to execution " + this + '.'));
                    throw new CompletionException(
                        new FlinkException(
                            "Could not assign slot " + logicalSlot + " to execution " + this + " because it has already been assigned "));
                }
            });
    } else {
        // call race, already deployed, or already done
        throw new IllegalExecutionStateException(this, CREATED, state);
    }
}
/**
 * Registers this execution's produced partitions with the shuffle master, caches
 * the resulting deployment descriptors, and starts tracking the partitions on the
 * given TaskManager. Completed on the job master main thread.
 *
 * @param location location of the TaskManager that will produce the partitions
 * @return future completed with this execution once registration is done
 */
@VisibleForTesting
CompletableFuture<Execution> registerProducedPartitions(TaskManagerLocation location) {
    assertRunningInJobMasterMainThread();

    return FutureUtils.thenApplyAsyncIfNotDone(
        registerProducedPartitions(vertex, location, attemptId),
        vertex.getExecutionGraph().getJobMasterMainThreadExecutor(),
        producedPartitionsCache -> {
            producedPartitions = producedPartitionsCache;
            startTrackingPartitions(location.getResourceID(), producedPartitionsCache.values());
            return this;
        });
}
/**
 * Registers all produced partitions of the given vertex with the shuffle master and
 * builds their deployment descriptors.
 *
 * @param vertex vertex producing the partitions
 * @param location TaskManager location of the producer
 * @param attemptId execution attempt producing the partitions
 * @return future completed with descriptors keyed by partition id, in partition order
 */
@VisibleForTesting
static CompletableFuture<Map<IntermediateResultPartitionID, ResultPartitionDeploymentDescriptor>> registerProducedPartitions(
        ExecutionVertex vertex,
        TaskManagerLocation location,
        ExecutionAttemptID attemptId) {
    ProducerDescriptor producerDescriptor = ProducerDescriptor.create(location, attemptId);

    boolean lazyScheduling = vertex.getExecutionGraph().getScheduleMode().allowLazyDeployment();

    Collection<IntermediateResultPartition> partitions = vertex.getProducedPartitions().values();
    Collection<CompletableFuture<ResultPartitionDeploymentDescriptor>> partitionRegistrations =
        new ArrayList<>(partitions.size());

    for (IntermediateResultPartition partition : partitions) {
        PartitionDescriptor partitionDescriptor = PartitionDescriptor.from(partition);
        int maxParallelism = getPartitionMaxParallelism(partition);
        CompletableFuture<? extends ShuffleDescriptor> shuffleDescriptorFuture = vertex
            .getExecutionGraph()
            .getShuffleMaster()
            .registerPartitionWithProducer(partitionDescriptor, producerDescriptor);

        // pipelined partitions (or forced mode) are auto-released once consumed
        final boolean releasePartitionOnConsumption =
            vertex.getExecutionGraph().isForcePartitionReleaseOnConsumption()
                || !partitionDescriptor.getPartitionType().isBlocking();

        CompletableFuture<ResultPartitionDeploymentDescriptor> partitionRegistration = shuffleDescriptorFuture
            .thenApply(shuffleDescriptor -> new ResultPartitionDeploymentDescriptor(
                partitionDescriptor,
                shuffleDescriptor,
                maxParallelism,
                lazyScheduling,
                releasePartitionOnConsumption
                    ? ShuffleDescriptor.ReleaseType.AUTO
                    : ShuffleDescriptor.ReleaseType.MANUAL));
        partitionRegistrations.add(partitionRegistration);
    }

    return FutureUtils.combineAll(partitionRegistrations).thenApply(rpdds -> {
        // LinkedHashMap preserves the partition registration order
        Map<IntermediateResultPartitionID, ResultPartitionDeploymentDescriptor> producedPartitions =
            new LinkedHashMap<>(partitions.size());
        rpdds.forEach(rpdd -> producedPartitions.put(rpdd.getPartitionId(), rpdd));
        return producedPartitions;
    });
}
/**
 * Determines the max parallelism relevant for the given partition: the max
 * parallelism of its (single) consumer job vertex, or the global upper bound
 * when the partition has no consumers yet.
 *
 * @param partition the produced partition
 * @return max parallelism of the consumer, or the key-group upper bound
 */
private static int getPartitionMaxParallelism(IntermediateResultPartition partition) {
    final List<List<ExecutionEdge>> consumers = partition.getConsumers();
    if (consumers.isEmpty()) {
        return KeyGroupRangeAssignment.UPPER_BOUND_MAX_PARALLELISM;
    }
    // only a single consumer group per partition is supported; take the first edge's target
    final ExecutionJobVertex consumerVertex = consumers.get(0).get(0).getTarget().getJobVertex();
    return consumerVertex.getMaxParallelism();
}
/**
* Deploys the execution to the previously assigned resource.
*
* @throws JobException if the execution cannot be deployed to the assigned resource
*/
public void deploy() throws JobException {
assertRunningInJobMasterMainThread();
final LogicalSlot slot = assignedResource;
checkNotNull(slot, "In order to deploy the execution we first have to assign a resource via tryAssignResource.");
if (!slot.isAlive()) {
throw new JobException("Target slot (TaskManager) for deployment is no longer alive.");
}
ExecutionState previous = this.state;
if (previous == SCHEDULED || previous == CREATED) {
if (!transitionState(previous, DEPLOYING)) {
throw new IllegalStateException("Cannot deploy task: Concurrent deployment call race.");
}
}
else {
throw new IllegalStateException("The vertex must be in CREATED or SCHEDULED state to be deployed. Found state " + previous);
}
if (this != slot.getPayload()) {
throw new IllegalStateException(
String.format("The execution %s has not been assigned to the assigned slot.", this));
}
try {
if (this.state != DEPLOYING) {
slot.releaseSlot(new FlinkException("Actual state of execution " + this + " (" + state + ") does not match expected state DEPLOYING."));
return;
}
if (LOG.isInfoEnabled()) {
LOG.info(String.format("Deploying %s (attempt
attemptNumber, getAssignedResourceLocation()));
}
final TaskDeploymentDescriptor deployment = TaskDeploymentDescriptorFactory
.fromExecutionVertex(vertex, attemptNumber)
.createDeploymentDescriptor(
slot.getAllocationId(),
slot.getPhysicalSlotNumber(),
taskRestore,
producedPartitions.values());
taskRestore = null;
final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
final ComponentMainThreadExecutor jobMasterMainThreadExecutor =
vertex.getExecutionGraph().getJobMasterMainThreadExecutor();
CompletableFuture.supplyAsync(() -> taskManagerGateway.submitTask(deployment, rpcTimeout), executor)
.thenCompose(Function.identity())
.whenCompleteAsync(
(ack, failure) -> {
if (failure != null) {
if (failure instanceof TimeoutException) {
String taskname = vertex.getTaskNameWithSubtaskIndex() + " (" + attemptId + ')';
markFailed(new Exception(
"Cannot deploy task " + taskname + " - TaskManager (" + getAssignedResourceLocation()
+ ") not responding after a rpcTimeout of " + rpcTimeout, failure));
} else {
markFailed(failure);
}
}
},
jobMasterMainThreadExecutor);
}
catch (Throwable t) {
markFailed(t);
ExceptionUtils.rethrow(t);
}
}
/**
 * Cancels this execution. Must run in the job master main thread.
 *
 * <p>Loops because {@code state} may be changed concurrently; each pass
 * re-reads the current state and applies the matching transition until
 * one sticks.
 */
public void cancel() {
assertRunningInJobMasterMainThread();
while (true) {
ExecutionState current = this.state;
if (current == CANCELING || current == CANCELED) {
// Cancellation already requested or finished - nothing more to do.
return;
}
else if (current == RUNNING || current == DEPLOYING) {
// Task is (being) deployed: request cancellation via RPC with retries.
if (startCancelling(NUM_CANCEL_CALL_TRIES)) {
return;
}
// Lost the state race - loop and re-evaluate.
}
else if (current == FINISHED || current == FAILED) {
// Already terminal: nothing to cancel.
return;
}
else if (current == CREATED || current == SCHEDULED) {
// Not deployed yet: cancel locally without any RPC round trip.
if (cancelAtomically()) {
return;
}
}
else {
throw new IllegalStateException(current.name());
}
}
}
/**
 * Suspends this execution: forces it directly into CANCELED without waiting
 * for a TaskManager response, and returns the future that completes once the
 * assigned resource has been released.
 */
public CompletableFuture<?> suspend() {
switch(state) {
case RUNNING:
case DEPLOYING:
case CREATED:
case SCHEDULED:
// Suspending must always succeed from these non-terminal states.
if (!cancelAtomically()) {
throw new IllegalStateException(
String.format("Could not directly go to %s from %s.", CANCELED.name(), state.name()));
}
break;
case CANCELING:
// Cancellation already in flight; force-complete it.
completeCancelling();
break;
case FINISHED:
case FAILED:
case CANCELED:
// Already terminal - nothing to do.
break;
default:
throw new IllegalStateException(state.name());
}
return releaseFuture;
}
/**
 * Schedules the given consumer vertex for execution; on any error the
 * consumer vertex itself is failed rather than this execution.
 */
private void scheduleConsumer(ExecutionVertex consumerVertex) {
try {
final ExecutionGraph graph = consumerVertex.getExecutionGraph();
consumerVertex.scheduleForExecution(
graph.getSlotProvider(),
graph.isQueuedSchedulingAllowed(),
LocationPreferenceConstraint.ANY,
Collections.emptySet());
} catch (Throwable failure) {
consumerVertex.fail(new IllegalStateException("Could not schedule consumer " +
"vertex " + consumerVertex, failure));
}
}
/**
 * For each consumer of a newly available partition: schedules the consumer
 * if it has not started yet, or forwards the partition info if it is already
 * deploying/running.
 */
void scheduleOrUpdateConsumers(List<List<ExecutionEdge>> allConsumers) {
assertRunningInJobMasterMainThread();
final int numConsumers = allConsumers.size();
if (numConsumers > 1) {
fail(new IllegalStateException("Currently, only a single consumer group per partition is supported."));
} else if (numConsumers == 0) {
return;
}
for (ExecutionEdge edge : allConsumers.get(0)) {
final ExecutionVertex consumerVertex = edge.getTarget();
final Execution consumer = consumerVertex.getCurrentExecutionAttempt();
final ExecutionState consumerState = consumer.getState();
if (consumerState == CREATED) {
// Only schedule if the input constraint allows it (ANY, or all inputs satisfied).
if (consumerVertex.getInputDependencyConstraint() == InputDependencyConstraint.ANY ||
consumerVertex.checkInputDependencyConstraints()) {
scheduleConsumer(consumerVertex);
}
}
else if (consumerState == DEPLOYING || consumerState == RUNNING) {
final PartitionInfo partitionInfo = createPartitionInfo(edge);
if (consumerState == DEPLOYING) {
// Not running yet: cache the info; it is sent once the task switches to RUNNING.
consumerVertex.cachePartitionInfo(partitionInfo);
} else {
consumer.sendUpdatePartitionInfoRpcCall(Collections.singleton(partitionInfo));
}
}
}
}
/** Builds the {@link PartitionInfo} describing the partition consumed via the given edge. */
private static PartitionInfo createPartitionInfo(ExecutionEdge executionEdge) {
final IntermediateDataSetID dataSetId = executionEdge.getSource().getIntermediateResult().getId();
return new PartitionInfo(dataSetId, getConsumedPartitionShuffleDescriptor(executionEdge, false));
}
/**
 * This method fails the vertex due to an external condition. The task will move to state FAILED.
 * If the task was in state RUNNING or DEPLOYING before, it will send a cancel call to the TaskManager.
 *
 * @param t The exception that caused the task to fail.
 */
@Override
public void fail(Throwable t) {
// Not a callback from the TaskManager, so processFail may still need to send a cancel RPC.
processFail(t, false);
}
/**
 * Requests a stack trace sample from the task backing this execution.
 *
 * @param sampleId id of the stack trace sample
 * @param numSamples number of samples the response should contain
 * @param delayBetweenSamples pause between consecutive samples
 * @param maxStackTraceDepth maximum depth of each sampled stack trace
 * @param timeout after which the request fails
 * @return future holding the stack trace sample response
 */
public CompletableFuture<StackTraceSampleResponse> requestStackTraceSample(
int sampleId,
int numSamples,
Time delayBetweenSamples,
int maxStackTraceDepth,
Time timeout) {
final LogicalSlot slot = assignedResource;
// Without an assigned slot there is no TaskManager to ask.
if (slot == null) {
return FutureUtils.completedExceptionally(new Exception("The execution has no slot assigned."));
}
return slot.getTaskManagerGateway().requestStackTraceSample(
attemptId,
sampleId,
numSamples,
delayBetweenSamples,
maxStackTraceDepth,
timeout);
}
/**
 * Notifies the task of this execution that a checkpoint completed.
 *
 * @param checkpointId id of the completed checkpoint
 * @param timestamp timestamp of the completed checkpoint
 */
public void notifyCheckpointComplete(long checkpointId, long timestamp) {
final LogicalSlot slot = assignedResource;
if (slot == null) {
// No slot means the task is gone; this notification can be safely dropped.
LOG.debug("The execution has no slot assigned. This indicates that the execution is " +
"no longer running.");
return;
}
slot.getTaskManagerGateway().notifyCheckpointComplete(attemptId, getVertex().getJobId(), checkpointId, timestamp);
}
/**
 * Trigger a new checkpoint on the task of this execution.
 *
 * @param checkpointId of the checkpoint to trigger
 * @param timestamp of the checkpoint to trigger
 * @param checkpointOptions of the checkpoint to trigger
 */
public void triggerCheckpoint(long checkpointId, long timestamp, CheckpointOptions checkpointOptions) {
// Plain checkpoint: never advances the watermark to MAX.
triggerCheckpointHelper(checkpointId, timestamp, checkpointOptions, false);
}
/**
 * Trigger a new synchronous savepoint on the task of this execution.
 *
 * @param checkpointId of the checkpoint to trigger
 * @param timestamp of the checkpoint to trigger
 * @param checkpointOptions of the checkpoint to trigger
 * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline
 * to fire any registered event-time timers
 */
public void triggerSynchronousSavepoint(long checkpointId, long timestamp, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) {
triggerCheckpointHelper(checkpointId, timestamp, checkpointOptions, advanceToEndOfEventTime);
}
// Shared implementation for checkpoints and synchronous savepoints.
// advanceToEndOfEventTime is only legal for synchronous savepoints.
private void triggerCheckpointHelper(long checkpointId, long timestamp, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) {
final CheckpointType checkpointType = checkpointOptions.getCheckpointType();
if (advanceToEndOfEventTime && !(checkpointType.isSynchronous() && checkpointType.isSavepoint())) {
throw new IllegalArgumentException("Only synchronous savepoints are allowed to advance the watermark to MAX.");
}
final LogicalSlot slot = assignedResource;
if (slot != null) {
final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
taskManagerGateway.triggerCheckpoint(attemptId, getVertex().getJobId(), checkpointId, timestamp, checkpointOptions, advanceToEndOfEventTime);
} else {
// No slot: the task is gone, so there is nothing to checkpoint.
LOG.debug("The execution has no slot assigned. This indicates that the execution is no longer running.");
}
}
/**
 * This method marks the task as failed, but will make no attempt to remove task execution from the task manager.
 * It is intended for cases where the task is known not to be running, or then the TaskManager reports failure
 * (in which case it has already removed the task).
 *
 * @param t The exception that caused the task to fail.
 */
void markFailed(Throwable t) {
// isCallback = true: suppresses the cancel RPC that processFail would otherwise send.
processFail(t, true);
}
// Same as markFailed(Throwable), additionally recording final accumulators and IO metrics.
void markFailed(Throwable t, Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
processFail(t, true, userAccumulators, metrics);
}
// Test-only convenience: finish without accumulators or metrics.
@VisibleForTesting
void markFinished() {
markFinished(null, null);
}
/**
 * Cancels with zero RPC retries and, if the CANCELING transition succeeded,
 * immediately completes the cancellation. Returns whether it succeeded.
 */
private boolean cancelAtomically() {
final boolean cancelled = startCancelling(0);
if (cancelled) {
completeCancelling();
}
return cancelled;
}
/**
 * Attempts the transition into CANCELING; on success cancels the pending
 * location future and sends the cancel RPC with the given retry budget.
 */
private boolean startCancelling(int numberCancelRetries) {
if (!transitionState(state, CANCELING)) {
// Someone else changed the state concurrently.
return false;
}
taskManagerLocationFuture.cancel(false);
sendCancelRpcCall(numberCancelRetries);
return true;
}
// Completes cancellation without recording accumulators or metrics.
void completeCancelling() {
completeCancelling(null, null);
}
/**
 * Moves this execution into the final CANCELED state, recording the given
 * accumulators and metrics. Tolerates races with concurrent transitions.
 */
void completeCancelling(Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
while (true) {
ExecutionState current = this.state;
if (current == CANCELED) {
return;
}
else if (current == CANCELING || current == RUNNING || current == DEPLOYING) {
// RUNNING/DEPLOYING are allowed here: the TaskManager may report CANCELED
// before the JobManager-side state has caught up.
updateAccumulatorsAndMetrics(userAccumulators, metrics);
if (transitionState(current, CANCELED)) {
finishCancellation();
return;
}
// CAS lost - retry with the freshly observed state.
}
else {
// FAILED is an acceptable outcome of a cancel attempt; anything else is a bug.
if (current != FAILED) {
String message = String.format("Asynchronous race: Found %s in state %s after successful cancel call.", vertex.getTaskNameWithSubtaskIndex(), state);
LOG.error(message);
vertex.getExecutionGraph().failGlobal(new Exception(message));
}
return;
}
}
}
// Releases the slot, removes this attempt from the graph's registry, and frees partitions.
private void finishCancellation() {
releaseAssignedResource(new FlinkException("Execution " + this + " was cancelled."));
vertex.getExecutionGraph().deregisterExecution(this);
stopTrackingAndReleasePartitions();
}
// Buffers a partition info to be sent once the task switches to RUNNING.
void cachePartitionInfo(PartitionInfo partitionInfo) {
partitionInfos.add(partitionInfo);
}
/** Flushes all cached partition infos to the task in a single update RPC. */
private void sendPartitionInfos() {
if (partitionInfos.isEmpty()) {
return;
}
// Snapshot before clearing so the RPC call sees a stable copy.
final ArrayList<PartitionInfo> pendingInfos = new ArrayList<>(partitionInfos);
partitionInfos.clear();
sendUpdatePartitionInfoRpcCall(pendingInfos);
}
// Fails this execution without accumulators/metrics; see the 4-arg overload.
private boolean processFail(Throwable t, boolean isCallback) {
return processFail(t, isCallback, null, null);
}
/**
 * Core failure handling: transitions this execution to FAILED, releases its
 * resources and, unless this is a callback from the TaskManager, sends a
 * cancel RPC to remove the still-running task.
 *
 * @return true if this call performed the transition to FAILED
 */
private boolean processFail(Throwable t, boolean isCallback, Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
assertRunningInJobMasterMainThread();
while (true) {
ExecutionState current = this.state;
if (current == FAILED) {
// Already failed - the first failure wins.
return false;
}
if (current == CANCELED || current == FINISHED) {
if (LOG.isDebugEnabled()) {
LOG.debug("Ignoring transition of vertex {} to {} while being {}.", getVertexWithAttempt(), FAILED, current);
}
return false;
}
if (current == CANCELING) {
// A failure during cancellation still counts as a completed cancellation.
completeCancelling(userAccumulators, metrics);
return false;
}
if (transitionState(current, FAILED, t)) {
// We won the transition: record the cause and tear everything down.
this.failureCause = t;
updateAccumulatorsAndMetrics(userAccumulators, metrics);
releaseAssignedResource(t);
vertex.getExecutionGraph().deregisterExecution(this);
stopTrackingAndReleasePartitions();
if (!isCallback && (current == RUNNING || current == DEPLOYING)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Sending out cancel request, to remove task execution from TaskManager.");
}
try {
if (assignedResource != null) {
sendCancelRpcCall(NUM_CANCEL_CALL_TRIES);
}
} catch (Throwable tt) {
// Best effort: failing to cancel must not mask the original failure.
LOG.error("Error triggering cancel call while marking task {} as failed.", getVertex().getTaskNameWithSubtaskIndex(), tt);
}
}
return true;
}
// CAS lost - loop and re-evaluate the freshly observed state.
}
}
/**
 * Attempts the DEPLOYING -> RUNNING transition and, on success, flushes any
 * cached partition infos. Handles concurrent cancel/fail races otherwise.
 *
 * @return true if the transition to RUNNING happened
 */
boolean switchToRunning() {
if (transitionState(DEPLOYING, RUNNING)) {
sendPartitionInfos();
return true;
}
else {
ExecutionState currentState = this.state;
if (currentState == FINISHED || currentState == CANCELED) {
// Expected race: task already reached a terminal state - nothing to do.
}
else if (currentState == CANCELING || currentState == FAILED) {
if (LOG.isDebugEnabled()) {
LOG.debug("Concurrent canceling/failing of {} while deployment was in progress.", getVertexWithAttempt());
}
// Make sure the now-unwanted task is removed from the TaskManager.
sendCancelRpcCall(NUM_CANCEL_CALL_TRIES);
}
else {
// Any other state here indicates an inconsistent state machine.
String message = String.format("Concurrent unexpected state transition of task %s to %s while deployment was in progress.",
getVertexWithAttempt(), currentState);
if (LOG.isDebugEnabled()) {
LOG.debug(message);
}
sendCancelRpcCall(NUM_CANCEL_CALL_TRIES);
markFailed(new Exception(message));
}
return false;
}
}
/**
 * This method sends a CancelTask message to the instance of the assigned slot.
 *
 * <p>The sending is tried up to NUM_CANCEL_CALL_TRIES times.
 */
private void sendCancelRpcCall(int numberRetries) {
final LogicalSlot slot = assignedResource;
if (slot != null) {
final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
final ComponentMainThreadExecutor jobMasterMainThreadExecutor =
getVertex().getExecutionGraph().getJobMasterMainThreadExecutor();
// Retry the cancel RPC on the main-thread executor up to numberRetries times.
CompletableFuture<Acknowledge> cancelResultFuture = FutureUtils.retry(
() -> taskManagerGateway.cancelTask(attemptId, rpcTimeout),
numberRetries,
jobMasterMainThreadExecutor);
cancelResultFuture.whenComplete(
(ack, failure) -> {
if (failure != null) {
// Could not cancel: escalate to a failure of this execution.
fail(new Exception("Task could not be canceled.", failure));
}
});
}
}
/** Registers every produced partition with the partition tracker of the owning graph. */
private void startTrackingPartitions(final ResourceID taskExecutorId, final Collection<ResultPartitionDeploymentDescriptor> partitions) {
final PartitionTracker tracker = vertex.getExecutionGraph().getPartitionTracker();
partitions.forEach(partition -> tracker.startTrackingPartition(taskExecutorId, partition));
}
/** Stops tracking and releases all result partitions produced by this attempt. */
private void stopTrackingAndReleasePartitions() {
LOG.info("Discarding the results produced by task execution {}.", attemptId);
if (producedPartitions == null || producedPartitions.isEmpty()) {
return;
}
// Collect the IDs of all produced partitions and hand them to the tracker.
final List<ResultPartitionID> partitionIds = new ArrayList<>(producedPartitions.size());
for (ResultPartitionDeploymentDescriptor descriptor : producedPartitions.values()) {
partitionIds.add(descriptor.getShuffleDescriptor().getResultPartitionID());
}
getVertex().getExecutionGraph().getPartitionTracker().stopTrackingAndReleasePartitions(partitionIds);
}
/**
 * Update the partition infos on the assigned resource.
 *
 * @param partitionInfos for the remote task
 */
private void sendUpdatePartitionInfoRpcCall(
final Iterable<PartitionInfo> partitionInfos) {
final LogicalSlot slot = assignedResource;
if (slot != null) {
final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
final TaskManagerLocation taskManagerLocation = slot.getTaskManagerLocation();
CompletableFuture<Acknowledge> updatePartitionsResultFuture = taskManagerGateway.updatePartitions(attemptId, partitionInfos, rpcTimeout);
// Failure handling runs on the job master main thread executor.
updatePartitionsResultFuture.whenCompleteAsync(
(ack, failure) -> {
if (failure != null) {
fail(new IllegalStateException("Update task on TaskManager " + taskManagerLocation +
" failed due to:", failure));
}
}, getVertex().getExecutionGraph().getJobMasterMainThreadExecutor());
}
}
/**
 * Releases the assigned resource and completes the release future
 * once the assigned resource has been successfully released.
 *
 * @param cause for the resource release, null if none
 */
private void releaseAssignedResource(@Nullable Throwable cause) {
assertRunningInJobMasterMainThread();
final LogicalSlot slot = assignedResource;
if (slot != null) {
ComponentMainThreadExecutor jobMasterMainThreadExecutor =
getVertex().getExecutionGraph().getJobMasterMainThreadExecutor();
slot.releaseSlot(cause)
.whenComplete((Object ignored, Throwable throwable) -> {
// The release callback must also run in the main thread.
jobMasterMainThreadExecutor.assertRunningInMainThread();
if (throwable != null) {
releaseFuture.completeExceptionally(throwable);
} else {
releaseFuture.complete(null);
}
});
} else {
// No slot was ever assigned: nothing to release.
releaseFuture.complete(null);
}
}
/**
 * Calculates the preferred locations based on the location preference constraint.
 *
 * @param locationPreferenceConstraint constraint for the location preference
 * @return Future containing the collection of preferred locations. This might not be completed if not all inputs
 * have been a resource assigned.
 */
@VisibleForTesting
public CompletableFuture<Collection<TaskManagerLocation>> calculatePreferredLocations(LocationPreferenceConstraint locationPreferenceConstraint) {
final Collection<CompletableFuture<TaskManagerLocation>> preferredLocationFutures = getVertex().getPreferredLocations();
final CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture;
switch(locationPreferenceConstraint) {
case ALL:
// Wait until every input location is known.
preferredLocationsFuture = FutureUtils.combineAll(preferredLocationFutures);
break;
case ANY:
// Use only the locations that are already known right now.
final ArrayList<TaskManagerLocation> completedTaskManagerLocations = new ArrayList<>(preferredLocationFutures.size());
for (CompletableFuture<TaskManagerLocation> preferredLocationFuture : preferredLocationFutures) {
if (preferredLocationFuture.isDone() && !preferredLocationFuture.isCompletedExceptionally()) {
final TaskManagerLocation taskManagerLocation = preferredLocationFuture.getNow(null);
if (taskManagerLocation == null) {
throw new FlinkRuntimeException("TaskManagerLocationFuture was completed with null. This indicates a programming bug.");
}
completedTaskManagerLocations.add(taskManagerLocation);
}
}
preferredLocationsFuture = CompletableFuture.completedFuture(completedTaskManagerLocations);
break;
default:
throw new RuntimeException("Unknown LocationPreferenceConstraint " + locationPreferenceConstraint + '.');
}
return preferredLocationsFuture;
}
// Transition without an associated error; see the 3-arg overload.
private boolean transitionState(ExecutionState currentState, ExecutionState targetState) {
return transitionState(currentState, targetState, null);
}
/**
 * Atomically transitions {@code currentState -> targetState} via CAS,
 * recording the timestamp, completing the terminal-state future if needed,
 * and notifying the vertex of the change.
 *
 * @return true if this call performed the transition
 */
private boolean transitionState(ExecutionState currentState, ExecutionState targetState, Throwable error) {
if (currentState.isTerminal()) {
throw new IllegalStateException("Cannot leave terminal state " + currentState + " to transition to " + targetState + '.');
}
if (STATE_UPDATER.compareAndSet(this, currentState, targetState)) {
markTimestamp(targetState);
if (error == null) {
LOG.info("{} ({}) switched from {} to {}.", getVertex().getTaskNameWithSubtaskIndex(), getAttemptId(), currentState, targetState);
} else {
LOG.info("{} ({}) switched from {} to {}.", getVertex().getTaskNameWithSubtaskIndex(), getAttemptId(), currentState, targetState, error);
}
if (targetState.isTerminal()) {
// Wake up everyone waiting for this execution to finish.
terminalStateFuture.complete(targetState);
}
try {
vertex.notifyStateTransition(this, targetState, error);
}
catch (Throwable t) {
// Listener failures must not break the state machine itself.
LOG.error("Error while notifying execution graph of execution state transition.", t);
}
return true;
} else {
// CAS failed: someone else transitioned concurrently.
return false;
}
}
// Records the current wall-clock time for entering the given state.
private void markTimestamp(ExecutionState state) {
markTimestamp(state, System.currentTimeMillis());
}
// Stores the timestamp in the per-state slot indexed by the state's ordinal.
private void markTimestamp(ExecutionState state, long timestamp) {
this.stateTimestamps[state.ordinal()] = timestamp;
}
public String getVertexWithAttempt() {
return vertex.getTaskNameWithSubtaskIndex() + " - execution
}
/**
 * Update accumulators (discarded when the Execution has already been terminated).
 * @param userAccumulators the user accumulators
 */
public void setAccumulators(Map<String, Accumulator<?, ?>> userAccumulators) {
synchronized (accumulatorLock) {
// Terminal executions keep their final accumulators; late updates are dropped.
if (!state.isTerminal()) {
this.userAccumulators = userAccumulators;
}
}
}
// Returns the latest user accumulators; may be null if none were ever reported.
public Map<String, Accumulator<?, ?>> getUserAccumulators() {
return userAccumulators;
}
/** Returns the user accumulators in stringified form; tolerates a null accumulator map. */
@Override
public StringifiedAccumulatorResult[] getUserAccumulatorsStringified() {
final Map<String, Accumulator<?, ?>> snapshot = userAccumulators;
final Map<String, OptionalFailure<Accumulator<?, ?>>> accumulators;
if (snapshot == null) {
accumulators = null;
} else {
// Wrap every accumulator in OptionalFailure before stringification.
accumulators = snapshot.entrySet()
.stream()
.collect(Collectors.toMap(Map.Entry::getKey, entry -> OptionalFailure.of(entry.getValue())));
}
return StringifiedAccumulatorResult.stringifyAccumulatorResults(accumulators);
}
// Index of this subtask within the parallel vertex.
@Override
public int getParallelSubtaskIndex() {
return getVertex().getParallelSubtaskIndex();
}
// Final IO metrics of this attempt; null until the task reported them.
@Override
public IOMetrics getIOMetrics() {
return ioMetrics;
}
// Records the given accumulators (under the accumulator lock) and metrics; null args are ignored.
private void updateAccumulatorsAndMetrics(Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) {
if (userAccumulators != null) {
synchronized (accumulatorLock) {
this.userAccumulators = userAccumulators;
}
}
if (metrics != null) {
this.ioMetrics = metrics;
}
}
@Override
public String toString() {
final LogicalSlot slot = assignedResource;
return String.format("Attempt
(slot == null ? "(unassigned)" : slot), state);
}
// Creates an immutable, serializable snapshot of this execution for archiving.
@Override
public ArchivedExecution archive() {
return new ArchivedExecution(this);
}
// Guard: most state mutations must happen in the job master main thread.
private void assertRunningInJobMasterMainThread() {
vertex.getExecutionGraph().assertRunningInJobMasterMainThread();
}
} |
Why not use the executeSql function in StatisticUtil? | public void execute() throws Exception {
// NOTE(review): the original acquired tbl.readLock() here AND again inside the
// try block below, but released it only once in the finally block -- the outer
// read lock was never unlocked, leaving the table read-locked forever. The
// redundant outer acquisition has been removed; the lock is now taken exactly
// once, inside try/finally.
Map<String, String> params = new HashMap<>();
params.put("internalDB", StatisticConstants.STATISTIC_DB_NAME);
params.put("columnStatTbl", StatisticConstants.STATISTIC_TBL_NAME);
params.put("catalogId", String.valueOf(catalog.getId()));
params.put("dbId", String.valueOf(db.getId()));
params.put("tblId", String.valueOf(tbl.getId()));
params.put("colId", String.valueOf(info.colName));
params.put("dataSizeFunction", getDataSizeFunction());
params.put("dbName", info.dbName);
params.put("colName", String.valueOf(info.colName));
params.put("tblName", String.valueOf(info.tblName));
// Build the per-partition INSERT ... SELECT statements under the read lock.
List<String> partitionAnalysisSQLs = new ArrayList<>();
try {
tbl.readLock();
Set<String> partNames = tbl.getPartitionNames();
for (String partName : partNames) {
Partition part = tbl.getPartition(partName);
if (part == null) {
continue;
}
params.put("partId", String.valueOf(tbl.getPartition(partName).getId()));
params.put("partName", String.valueOf(partName));
StringSubstitutor stringSubstitutor = new StringSubstitutor(params);
partitionAnalysisSQLs.add(stringSubstitutor.replace(ANALYZE_PARTITION_SQL_TEMPLATE));
}
} finally {
tbl.readUnlock();
}
// Execute the collected statements outside the lock.
for (String sql : partitionAnalysisSQLs) {
ConnectContext connectContext = StatisticUtil.buildConnectContext();
this.stmtExecutor = new StmtExecutor(connectContext, sql);
this.stmtExecutor.execute();
}
// Aggregate the per-partition rows into a single column-level statistic row.
params.remove("partId");
params.put("type", col.getType().toString());
StringSubstitutor stringSubstitutor = new StringSubstitutor(params);
String sql = stringSubstitutor.replace(ANALYZE_COLUMN_SQL_TEMPLATE);
ConnectContext connectContext = StatisticUtil.buildConnectContext();
this.stmtExecutor = new StmtExecutor(connectContext, sql);
this.stmtExecutor.execute();
} | this.stmtExecutor = new StmtExecutor(connectContext, sql); | public void execute() throws Exception {
// Template parameters shared by the partition-level and column-level SQL.
Map<String, String> params = new HashMap<>();
params.put("internalDB", StatisticConstants.STATISTIC_DB_NAME);
params.put("columnStatTbl", StatisticConstants.STATISTIC_TBL_NAME);
params.put("catalogId", String.valueOf(catalog.getId()));
params.put("dbId", String.valueOf(db.getId()));
params.put("tblId", String.valueOf(tbl.getId()));
params.put("colId", String.valueOf(info.colName));
params.put("dataSizeFunction", getDataSizeFunction());
params.put("dbName", info.dbName);
params.put("colName", String.valueOf(info.colName));
params.put("tblName", String.valueOf(info.tblName));
// Build the per-partition statements under the table read lock ...
List<String> partitionAnalysisSQLs = new ArrayList<>();
try {
tbl.readLock();
Set<String> partNames = tbl.getPartitionNames();
for (String partName : partNames) {
Partition part = tbl.getPartition(partName);
if (part == null) {
// Partition vanished concurrently; skip it.
continue;
}
params.put("partId", String.valueOf(tbl.getPartition(partName).getId()));
params.put("partName", String.valueOf(partName));
StringSubstitutor stringSubstitutor = new StringSubstitutor(params);
partitionAnalysisSQLs.add(stringSubstitutor.replace(ANALYZE_PARTITION_SQL_TEMPLATE));
}
} finally {
tbl.readUnlock();
}
// ... and execute them outside the lock.
for (String sql : partitionAnalysisSQLs) {
ConnectContext connectContext = StatisticsUtil.buildConnectContext();
this.stmtExecutor = new StmtExecutor(connectContext, sql);
this.stmtExecutor.execute();
}
// Aggregate partition-level rows into the column-level statistic row.
params.remove("partId");
params.put("type", col.getType().toString());
StringSubstitutor stringSubstitutor = new StringSubstitutor(params);
String sql = stringSubstitutor.replace(ANALYZE_COLUMN_SQL_TEMPLATE);
ConnectContext connectContext = StatisticsUtil.buildConnectContext();
this.stmtExecutor = new StmtExecutor(connectContext, sql);
this.stmtExecutor.execute();
} | class AnalysisJob {
private final AnalysisJobScheduler analysisJobScheduler;
private final AnalysisJobInfo info;
private CatalogIf catalog;
private Database db;
private Table tbl;
private Column col;
private StmtExecutor stmtExecutor;
// Creates the job and eagerly resolves catalog/db/table/column from the info.
public AnalysisJob(AnalysisJobScheduler analysisJobScheduler, AnalysisJobInfo info) {
this.analysisJobScheduler = analysisJobScheduler;
this.info = info;
init(info);
}
/**
 * Resolves catalog, database, table and column from the job info; marks the
 * job FAILED and aborts if any of them cannot be found.
 */
private void init(AnalysisJobInfo info) {
catalog = Env.getCurrentEnv().getCatalogMgr().getCatalog(info.catalogName);
if (catalog == null) {
analysisJobScheduler.updateJobStatus(info.jobId, JobState.FAILED,
String.format("Catalog with name: %s not exists", info.dbName), System.currentTimeMillis());
return;
}
db = Env.getCurrentEnv().getInternalCatalog().getDb(info.dbName).orElse(null);
if (db == null) {
analysisJobScheduler.updateJobStatus(info.jobId, JobState.FAILED,
String.format("DB with name %s not exists", info.dbName), System.currentTimeMillis());
return;
}
tbl = db.getTable(info.tblName).orElse(null);
if (tbl == null) {
analysisJobScheduler.updateJobStatus(
info.jobId, JobState.FAILED,
String.format("Table with name %s not exists", info.tblName), System.currentTimeMillis());
// BUG FIX: without this return, tbl.getColumn below throws NullPointerException.
return;
}
col = tbl.getColumn(info.colName);
if (col == null) {
analysisJobScheduler.updateJobStatus(
info.jobId, JobState.FAILED, String.format("Column with name %s not exists", info.tblName),
System.currentTimeMillis());
}
}
private static final String ANALYZE_PARTITION_SQL_TEMPLATE = "INSERT INTO "
+ "${internalDB}.${columnStatTbl}"
+ " SELECT "
+ "CONCAT(${tblId}, '-', '${colId}', '-', ${partId}) AS id, "
+ "${catalogId} AS catalog_id, "
+ "${dbId} AS db_id, "
+ "${tblId} AS tbl_id, "
+ "'${colId}' AS col_id, "
+ "${partId} AS part_id, "
+ "COUNT(1) AS row_count, "
+ "NDV(${colName}) AS ndv, "
+ "SUM(CASE WHEN ${colName} IS NULL THEN 1 ELSE 0 END) AS null_count, "
+ "MIN(${colName}) AS min, "
+ "MAX(${colName}) AS max, "
+ "${dataSizeFunction} AS data_size, "
+ "NOW()"
+ "FROM `${dbName}`.`${tblName}` "
+ "PARTITION ${partName}";
private static final String ANALYZE_COLUMN_SQL_TEMPLATE = "INSERT INTO "
+ "${internalDB}.${columnStatTbl}"
+ " SELECT id, catalog_id, db_id, tbl_id, col_id, part_id, row_count, "
+ " ndv, null_count, min, max, data_size, update_time\n"
+ " FROM \n"
+ " (SELECT CONCAT(${tblId}, '-', '${colId}') AS id, "
+ " ${catalogId} AS catalog_id, "
+ " ${dbId} AS db_id, "
+ " ${tblId} AS tbl_id, "
+ " '${colId}' AS col_id, "
+ " NULL AS part_id, "
+ " SUM(count) AS row_count, \n"
+ " SUM(null_count) AS null_count, "
+ " MIN(CAST(min AS ${type})) AS min, "
+ " MAX(CAST(max AS ${type})) AS max, "
+ " SUM(data_size_in_bytes) AS data_size, "
+ " NOW() AS update_time\n"
+ " FROM ${internalDB}.${columnStatTbl}"
+ " WHERE ${internalDB}.${columnStatTbl}.db_id = '${dbId}' AND "
+ " ${internalDB}.${columnStatTbl}.tbl_id='${tblId}' AND "
+ " ${internalDB}.${columnStatTbl}.col_id='${colId}'"
+ " ) t1, \n"
+ " (SELECT NDV(${colName}) AS ndv FROM `${dbName}`.`${tblName}`) t2\n";
/**
 * Returns the SQL expression measuring the column's data size: the summed
 * string length for string types, otherwise row count times the slot size.
 */
public String getDataSizeFunction() {
return col.getType().isStringType()
? "SUM(LENGTH(${colName}))"
: "COUNT(1) * " + col.getType().getSlotSize();
}
// Timestamp (ms) of the last execution of this job, taken from the job info.
public int getLastExecTime() {
return info.lastExecTimeInMs;
}
// Cancels the running statement (if any) and marks the job FAILED.
public void cancel() {
if (stmtExecutor != null) {
stmtExecutor.cancel();
}
analysisJobScheduler
.updateJobStatus(info.jobId, JobState.FAILED,
String.format("Job has been cancelled: %s", info.toString()), -1);
}
// Delegates the state change to the job info holder.
public void updateState(JobState jobState) {
info.updateState(jobState);
}
// Unique id of this analysis job.
public long getJobId() {
return info.jobId;
}
} | class AnalysisJob {
private final AnalysisJobScheduler analysisJobScheduler;
private final AnalysisJobInfo info;
private CatalogIf catalog;
private Database db;
private Table tbl;
private Column col;
private StmtExecutor stmtExecutor;
// Creates the job and eagerly resolves catalog/db/table/column from the info.
public AnalysisJob(AnalysisJobScheduler analysisJobScheduler, AnalysisJobInfo info) {
this.analysisJobScheduler = analysisJobScheduler;
this.info = info;
init(info);
}
/**
 * Resolves catalog, database, table and column from the job info; marks the
 * job FAILED and aborts if any of them cannot be found.
 */
private void init(AnalysisJobInfo info) {
catalog = Env.getCurrentEnv().getCatalogMgr().getCatalog(info.catalogName);
if (catalog == null) {
analysisJobScheduler.updateJobStatus(info.jobId, JobState.FAILED,
String.format("Catalog with name: %s not exists", info.dbName), System.currentTimeMillis());
return;
}
db = Env.getCurrentEnv().getInternalCatalog().getDb(info.dbName).orElse(null);
if (db == null) {
analysisJobScheduler.updateJobStatus(info.jobId, JobState.FAILED,
String.format("DB with name %s not exists", info.dbName), System.currentTimeMillis());
return;
}
tbl = db.getTable(info.tblName).orElse(null);
if (tbl == null) {
analysisJobScheduler.updateJobStatus(
info.jobId, JobState.FAILED,
String.format("Table with name %s not exists", info.tblName), System.currentTimeMillis());
// BUG FIX: without this return, tbl.getColumn below throws NullPointerException.
return;
}
col = tbl.getColumn(info.colName);
if (col == null) {
analysisJobScheduler.updateJobStatus(
info.jobId, JobState.FAILED, String.format("Column with name %s not exists", info.tblName),
System.currentTimeMillis());
}
}
private static final String ANALYZE_PARTITION_SQL_TEMPLATE = "INSERT INTO "
+ "${internalDB}.${columnStatTbl}"
+ " SELECT "
+ "CONCAT(${tblId}, '-', '${colId}', '-', ${partId}) AS id, "
+ "${catalogId} AS catalog_id, "
+ "${dbId} AS db_id, "
+ "${tblId} AS tbl_id, "
+ "'${colId}' AS col_id, "
+ "${partId} AS part_id, "
+ "COUNT(1) AS row_count, "
+ "NDV(${colName}) AS ndv, "
+ "SUM(CASE WHEN ${colName} IS NULL THEN 1 ELSE 0 END) AS null_count, "
+ "MIN(${colName}) AS min, "
+ "MAX(${colName}) AS max, "
+ "${dataSizeFunction} AS data_size, "
+ "NOW()"
+ "FROM `${dbName}`.`${tblName}` "
+ "PARTITION ${partName}";
private static final String ANALYZE_COLUMN_SQL_TEMPLATE = "INSERT INTO "
+ "${internalDB}.${columnStatTbl}"
+ " SELECT id, catalog_id, db_id, tbl_id, col_id, part_id, row_count, "
+ " ndv, null_count, min, max, data_size, update_time\n"
+ " FROM \n"
+ " (SELECT CONCAT(${tblId}, '-', '${colId}') AS id, "
+ " ${catalogId} AS catalog_id, "
+ " ${dbId} AS db_id, "
+ " ${tblId} AS tbl_id, "
+ " '${colId}' AS col_id, "
+ " NULL AS part_id, "
+ " SUM(count) AS row_count, \n"
+ " SUM(null_count) AS null_count, "
+ " MIN(CAST(min AS ${type})) AS min, "
+ " MAX(CAST(max AS ${type})) AS max, "
+ " SUM(data_size_in_bytes) AS data_size, "
+ " NOW() AS update_time\n"
+ " FROM ${internalDB}.${columnStatTbl}"
+ " WHERE ${internalDB}.${columnStatTbl}.db_id = '${dbId}' AND "
+ " ${internalDB}.${columnStatTbl}.tbl_id='${tblId}' AND "
+ " ${internalDB}.${columnStatTbl}.col_id='${colId}'"
+ " ) t1, \n"
+ " (SELECT NDV(${colName}) AS ndv FROM `${dbName}`.`${tblName}`) t2\n";
/**
 * Returns the SQL expression measuring the column's data size: the summed
 * string length for string types, otherwise row count times the slot size.
 */
private String getDataSizeFunction() {
return col.getType().isStringType()
? "SUM(LENGTH(${colName}))"
: "COUNT(1) * " + col.getType().getSlotSize();
}
// Timestamp (ms) of the last execution of this job, taken from the job info.
public int getLastExecTime() {
return info.lastExecTimeInMs;
}
// Cancels the running statement (if any) and marks the job FAILED.
public void cancel() {
if (stmtExecutor != null) {
stmtExecutor.cancel();
}
analysisJobScheduler
.updateJobStatus(info.jobId, JobState.FAILED,
String.format("Job has been cancelled: %s", info.toString()), -1);
}
// Delegates the state change to the job info holder.
public void updateState(JobState jobState) {
info.updateState(jobState);
}
// Unique id of this analysis job.
public long getJobId() {
return info.jobId;
}
} |
Maybe make the comment a bit clearer? E.g. "Exception correctly aborts the entire pipeline because no error handler was registered?" | public void testBRHEnabledPTransform() {
PCollection<Integer> record = pipeline.apply(Create.of(1, 2, 3, 4));
record.apply(new BRHEnabledPTransform());
// The exception correctly aborts the entire pipeline because no bad-record
// error handler was registered on the transform.
thrown.expect(RuntimeException.class);
pipeline.run();
} | thrown.expect(RuntimeException.class); | public void testBRHEnabledPTransform() {
PCollection<Integer> record = pipeline.apply(Create.of(1, 2, 3, 4));
record.apply(new BRHEnabledPTransform());
// The exception correctly aborts the entire pipeline because no bad-record
// error handler was registered on the transform.
thrown.expect(RuntimeException.class);
pipeline.run();
} | class ErrorHandlerTest {
@Rule public final TestPipeline pipeline = TestPipeline.create();
@Rule public ExpectedException thrown = ExpectedException.none();
// Registering and closing an error handler (even unused) must not break the pipeline.
@Test
@Category(NeedsRunner.class)
public void testGoodErrorHandlerUsage() throws Exception {
try (ErrorHandler<String, PCollection<String>> eh =
pipeline.registerErrorHandler(new DummySinkTransform<>())) {}
pipeline.run();
}
// A handler that is registered but never closed must fail the pipeline at run().
@Test
public void testBadErrorHandlerUsage() {
pipeline.registerErrorHandler(new DummySinkTransform<PCollection<String>>());
thrown.expect(IllegalStateException.class);
pipeline.run();
}
// NOTE(review): the original carried a duplicate @Test annotation, which does
// not compile in Java (@Test is not repeatable); the duplicate was removed.
@Test
@Category(NeedsRunner.class)
public void testErrorHandlerWithBRHTransform() throws Exception {
PCollection<Integer> record = pipeline.apply(Create.of(1, 2, 3, 4));
// Register the handler in try-with-resources so it is closed before run().
try (ErrorHandler<BadRecord, PCollection<BadRecord>> eh =
pipeline.registerErrorHandler(new DummySinkTransform<>())) {
record.apply(new BRHEnabledPTransform().withBadRecordHandler(eh));
}
pipeline.run();
}
// Pass-through "sink" used as a stand-in error consumer in these tests.
public static class DummySinkTransform<T extends PCollection<?>> extends PTransform<T, T> {
@Override
public T expand(T input) {
return input;
}
}
} | class ErrorHandlerTest {
@Rule public final TestPipeline pipeline = TestPipeline.create();
@Rule public ExpectedException thrown = ExpectedException.none();
// Registering and closing a bad-record handler (even unused) must not break the pipeline.
@Test
@Category(NeedsRunner.class)
public void testNoUsageErrorHandlerUsage() throws Exception {
try (BadRecordErrorHandler<PCollection<BadRecord>> eh =
pipeline.registerBadRecordErrorHandler(new DummySinkTransform<>())) {}
pipeline.run();
}
// A handler that is registered but never closed must fail the pipeline at run().
@Test
public void testUnclosedErrorHandlerUsage() {
pipeline.registerBadRecordErrorHandler(new DummySinkTransform<>());
thrown.expect(IllegalStateException.class);
pipeline.run();
}
// NOTE(review): the original carried a duplicate @Test annotation, which does
// not compile in Java (@Test is not repeatable); the duplicate was removed.
@Test
@Category(NeedsRunner.class)
public void testErrorHandlerWithBRHTransform() throws Exception {
PCollection<Integer> record = pipeline.apply(Create.of(1, 2, 3, 4));
DummySinkTransform<BadRecord> transform = new DummySinkTransform<>();
ErrorHandler<BadRecord, PCollection<BadRecord>> eh =
pipeline.registerBadRecordErrorHandler(transform);
record.apply(new BRHEnabledPTransform().withBadRecordHandler(eh));
eh.close();
// The odd elements (1 and 3) are routed to the handler as BadRecords.
PCollection<BadRecord> badRecords = eh.getOutput();
PAssert.that(badRecords)
.satisfies(
(records) -> {
int count = 0;
for (BadRecord badRecord : records) {
count++;
Record r = null;
if (Objects.equals(badRecord.getRecord().getHumanReadableJsonRecord(), "1")) {
r =
Record.builder()
.setHumanReadableJsonRecord("1")
.setEncodedRecord(new byte[] {0, 0, 0, 1})
.setCoder("BigEndianIntegerCoder")
.build();
} else {
r =
Record.builder()
.setHumanReadableJsonRecord("3")
.setEncodedRecord(new byte[] {0, 0, 0, 3})
.setCoder("BigEndianIntegerCoder")
.build();
}
BadRecord.Builder expectedBuilder = BadRecord.builder().setRecord(r);
BadRecord.Failure.Builder failure =
BadRecord.Failure.builder()
.setException("java.lang.RuntimeException: Integer was odd")
.setDescription("Integer was odd");
// Stack traces are environment-dependent; copy the actual one into the expectation.
failure.setExceptionStacktrace(badRecord.getFailure().getExceptionStacktrace());
expectedBuilder.setFailure(failure.build());
Assert.assertEquals("Expect failure to match", expectedBuilder.build(), badRecord);
}
Assert.assertEquals("Expect 2 errors", 2, count);
return null;
});
pipeline.run().waitUntilFinish();
}
public static class DummySinkTransform<T> extends PTransform<PCollection<T>, PCollection<T>> {
@Override
public PCollection<T> expand(PCollection<T> input) {
return input;
}
}
} |
Is the usual format we use in ballerina compilation errors? | private List<Executable> getExecutables(Class<?> clazz, String methodName, JMethodKind kind) {
if (kind == JMethodKind.CONSTRUCTOR) {
if (Modifier.isAbstract(clazz.getModifiers())) {
String classType = clazz.isInterface() ? "interface" : "abstract class";
throw new JInteropException(DiagnosticErrorCode.INSTANTIATION_ERROR,
"Cannot instantiate " + classType + " '" + clazz.getName() + "'");
}
return Arrays.asList(getConstructors(clazz));
} else {
List<Executable> list = new ArrayList<>();
for (Method method : getMethods(clazz)) {
if (method.getName().equals(methodName)) {
list.add(method);
}
}
return list;
}
} | "Cannot instantiate " + classType + " '" + clazz.getName() + "'"); | private List<Executable> getExecutables(Class<?> clazz, String methodName, JMethodKind kind) {
if (kind == JMethodKind.CONSTRUCTOR) {
if (Modifier.isAbstract(clazz.getModifiers())) {
throw new JInteropException(DiagnosticErrorCode.INSTANTIATION_ERROR,
"'" + clazz.getName() + "' is abstract, and cannot be instantiated");
}
return Arrays.asList(getConstructors(clazz));
} else {
List<Executable> list = new ArrayList<>();
for (Method method : getMethods(clazz)) {
if (method.getName().equals(methodName)) {
list.add(method);
}
}
return list;
}
} | class '" + jMethodRequest.declaringClass + "'");
}
} else {
return resolvedJMethods.get(0);
} | class '" + jMethodRequest.declaringClass + "'");
}
} else {
return resolvedJMethods.get(0);
} |
Yeah, I had initially done it that way to have booleans for each of the log levels but the performance improvements were not noticeable by doing that way vs the current approach. Moreover, `is*Enabled` methods in DefaultLogger are much faster than their logback counterparts (557664530 ops/sec vs 501866764 ops/sec). So, didn't want to micro-optimize. However, if you think evaluating it once in the constructor will improve overall readability, I am fine with making this change. | public DefaultLogger(String className) {
String classPath;
try {
classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
classPath = className;
}
this.classPath = classPath;
this.configuredLogLevel =
LogLevel.fromString(Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_LOG_LEVEL))
.getLogLevel();
} | .getLogLevel(); | public DefaultLogger(String className) {
String classPath;
try {
classPath = Class.forName(className).getCanonicalName();
} catch (ClassNotFoundException e) {
classPath = className;
}
this.classPath = classPath;
int configuredLogLevel =
LogLevel.fromString(Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_LOG_LEVEL))
.getLogLevel();
isTraceEnabled = LogLevel.VERBOSE.getLogLevel() > configuredLogLevel;
isDebugEnabled = LogLevel.VERBOSE.getLogLevel() >= configuredLogLevel;
isInfoEnabled = LogLevel.INFORMATIONAL.getLogLevel() >= configuredLogLevel;
isWarnEnabled = LogLevel.WARNING.getLogLevel() >= configuredLogLevel;
isErrorEnabled = LogLevel.ERROR.getLogLevel() >= configuredLogLevel;
} | class name passes in.
*/ | class name passes in.
*/ |
@mkouba @Ladicek removed the `select()` invocation where it was superfluous. | public void testSyntheticObserver() {
MyObserver.EVENTS.clear();
Arc.container().beanManager().getEvent().select(String.class).fire("foo");
assertEquals(2, MyObserver.EVENTS.size(), "Events: " + MyObserver.EVENTS);
assertTrue(MyObserver.EVENTS.contains("foo"));
assertTrue(MyObserver.EVENTS.contains("foo_MyObserver"));
MyObserver.EVENTS.clear();
Arc.container().beanManager().getEvent().select(String.class, NamedLiteral.of("bla")).fire("foo");
assertEquals(3, MyObserver.EVENTS.size(), "Events: " + MyObserver.EVENTS);
assertTrue(MyObserver.EVENTS.contains("foo"));
assertTrue(MyObserver.EVENTS.contains("foo_MyObserver"));
assertTrue(MyObserver.EVENTS.contains("synthetic2"));
} | Arc.container().beanManager().getEvent().select(String.class).fire("foo"); | public void testSyntheticObserver() {
MyObserver.EVENTS.clear();
Arc.container().beanManager().getEvent().fire("foo");
assertEquals(2, MyObserver.EVENTS.size(), "Events: " + MyObserver.EVENTS);
assertTrue(MyObserver.EVENTS.contains("foo"));
assertTrue(MyObserver.EVENTS.contains("foo_MyObserver"));
MyObserver.EVENTS.clear();
Arc.container().beanManager().getEvent().select(String.class, NamedLiteral.of("bla")).fire("foo");
assertEquals(3, MyObserver.EVENTS.size(), "Events: " + MyObserver.EVENTS);
assertTrue(MyObserver.EVENTS.contains("foo"));
assertTrue(MyObserver.EVENTS.contains("foo_MyObserver"));
assertTrue(MyObserver.EVENTS.contains("synthetic2"));
} | class SyntheticObserverTest {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder().beanClasses(MyObserver.class, Named.class)
.observerRegistrars(new ObserverRegistrar() {
@Override
public void register(RegistrationContext context) {
context.configure().observedType(String.class).notify(mc -> {
ResultHandle eventContext = mc.getMethodParam(0);
ResultHandle event = mc.invokeInterfaceMethod(
MethodDescriptor.ofMethod(EventContext.class, "getEvent", Object.class), eventContext);
ResultHandle events = mc.readStaticField(FieldDescriptor.of(MyObserver.class, "EVENTS", List.class));
mc.invokeInterfaceMethod(MethodDescriptor.ofMethod(List.class, "add", boolean.class, Object.class),
events, event);
mc.returnValue(null);
}).done();
context.configure().observedType(String.class).addQualifier().annotation(Named.class)
.addValue("value", "bla").done()
.notify(mc -> {
ResultHandle events = mc
.readStaticField(FieldDescriptor.of(MyObserver.class, "EVENTS", List.class));
mc.invokeInterfaceMethod(
MethodDescriptor.ofMethod(List.class, "add", boolean.class, Object.class),
events, mc.load("synthetic2"));
mc.returnValue(null);
}).done();
}
}).build();
@Test
@Singleton
static class MyObserver {
public static final List<String> EVENTS = new CopyOnWriteArrayList<String>();
void test(@Observes String event) {
EVENTS.add(event + "_MyObserver");
}
}
} | class SyntheticObserverTest {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder().beanClasses(MyObserver.class, Named.class)
.observerRegistrars(new ObserverRegistrar() {
@Override
public void register(RegistrationContext context) {
context.configure().observedType(String.class).notify(mc -> {
ResultHandle eventContext = mc.getMethodParam(0);
ResultHandle event = mc.invokeInterfaceMethod(
MethodDescriptor.ofMethod(EventContext.class, "getEvent", Object.class), eventContext);
ResultHandle events = mc.readStaticField(FieldDescriptor.of(MyObserver.class, "EVENTS", List.class));
mc.invokeInterfaceMethod(MethodDescriptor.ofMethod(List.class, "add", boolean.class, Object.class),
events, event);
mc.returnValue(null);
}).done();
context.configure().observedType(String.class).addQualifier().annotation(Named.class)
.addValue("value", "bla").done()
.notify(mc -> {
ResultHandle events = mc
.readStaticField(FieldDescriptor.of(MyObserver.class, "EVENTS", List.class));
mc.invokeInterfaceMethod(
MethodDescriptor.ofMethod(List.class, "add", boolean.class, Object.class),
events, mc.load("synthetic2"));
mc.returnValue(null);
}).done();
}
}).build();
@Test
@Singleton
static class MyObserver {
public static final List<String> EVENTS = new CopyOnWriteArrayList<String>();
void test(@Observes String event) {
EVENTS.add(event + "_MyObserver");
}
}
} |
We can't rely on the `999-SNAPSHOT` version in the tests. | public void shouldImportConditionalDependency() throws IOException, URISyntaxException, InterruptedException {
final File projectDir = getProjectDir("conditional-test-project");
runGradleWrapper(projectDir, "clean", ":runner:quarkusBuild", "-Dquarkus.package.type=mutable-jar");
final File buildDir = new File(projectDir, "runner" + File.separator + "build");
final Path mainLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("main");
assertThat(mainLib.resolve("org.acme.ext-a-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-b-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-c-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-e-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-d-1.0-SNAPSHOT.jar")).doesNotExist();
assertThat(mainLib.resolve("org.acme.transitive-dependency-1.0-SNAPSHOT.jar")).doesNotExist();
final Path deploymentLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("deployment");
assertThat(deploymentLib.resolve("org.acme.transitive-dependency-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("io.quarkus.quarkus-agroal-999-SNAPSHOT.jar")).doesNotExist();
} | assertThat(deploymentLib.resolve("io.quarkus.quarkus-agroal-999-SNAPSHOT.jar")).doesNotExist(); | public void shouldImportConditionalDependency() throws IOException, URISyntaxException, InterruptedException {
final File projectDir = getProjectDir("conditional-test-project");
runGradleWrapper(projectDir, "clean", ":runner:quarkusBuild", "-Dquarkus.package.type=mutable-jar");
final File buildDir = new File(projectDir, "runner" + File.separator + "build");
final Path mainLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("main");
assertThat(mainLib.resolve("org.acme.ext-a-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-b-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-c-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-e-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-d-1.0-SNAPSHOT.jar")).doesNotExist();
assertThat(mainLib.resolve("org.acme.transitive-dependency-1.0-SNAPSHOT.jar")).doesNotExist();
final Path deploymentLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("deployment");
assertThat(deploymentLib.resolve("org.acme.transitive-dependency-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("io.quarkus.quarkus-agroal-" + getQuarkusVersion() + ".jar")).doesNotExist();
} | class ConditionalDependenciesTest extends QuarkusGradleWrapperTestBase {
@Test
@Order(1)
public void publishTestExtensions() throws IOException, InterruptedException, URISyntaxException {
File dependencyProject = getProjectDir("conditional-dependencies");
runGradleWrapper(dependencyProject, ":transitive-dependency:publishToMavenLocal",
":simple-dependency:publishToMavenLocal");
runGradleWrapper(dependencyProject,
":ext-a:runtime:publishToMavenLocal",
":ext-a:deployment:publishToMavenLocal",
":ext-b:runtime:publishToMavenLocal",
":ext-b:deployment:publishToMavenLocal",
":ext-c:runtime:publishToMavenLocal",
":ext-c:deployment:publishToMavenLocal",
":ext-d:runtime:publishToMavenLocal",
":ext-d:deployment:publishToMavenLocal",
":ext-e:runtime:publishToMavenLocal",
":ext-e:deployment:publishToMavenLocal",
":ext-f:runtime:publishToMavenLocal",
":ext-f:deployment:publishToMavenLocal",
":ext-g:runtime:publishToMavenLocal",
":ext-g:deployment:publishToMavenLocal",
":ext-h:runtime:publishToMavenLocal",
":ext-h:deployment:publishToMavenLocal",
":ext-i:runtime:publishToMavenLocal",
":ext-i:deployment:publishToMavenLocal",
":ext-j:runtime:publishToMavenLocal",
":ext-j:deployment:publishToMavenLocal",
":ext-k:runtime:publishToMavenLocal",
":ext-k:deployment:publishToMavenLocal",
":ext-l:runtime:publishToMavenLocal",
":ext-l:deployment:publishToMavenLocal",
":ext-m:runtime:publishToMavenLocal",
":ext-m:deployment:publishToMavenLocal",
":ext-n:runtime:publishToMavenLocal",
":ext-n:deployment:publishToMavenLocal",
":ext-o:runtime:publishToMavenLocal",
":ext-o:deployment:publishToMavenLocal",
":ext-p:runtime:publishToMavenLocal",
":ext-p:deployment:publishToMavenLocal",
":ext-r:runtime:publishToMavenLocal",
":ext-r:deployment:publishToMavenLocal",
":ext-s:runtime:publishToMavenLocal",
":ext-s:deployment:publishToMavenLocal",
":ext-t:runtime:publishToMavenLocal",
":ext-t:deployment:publishToMavenLocal",
":ext-u:runtime:publishToMavenLocal",
":ext-u:deployment:publishToMavenLocal");
}
@Test
@Order(2)
@Test
@Order(3)
public void shouldNotImportConditionalDependency() throws IOException, URISyntaxException, InterruptedException {
final File projectDir = getProjectDir("conditional-test-project");
runGradleWrapper(projectDir, "clean", ":runner-with-exclude:quarkusBuild");
final File buildDir = new File(projectDir, "runner-with-exclude" + File.separator + "build");
final Path mainLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("main");
assertThat(mainLib.resolve("org.acme.ext-a-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-b-1.0-SNAPSHOT.jar")).doesNotExist();
assertThat(mainLib.resolve("org.acme.ext-c-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-e-1.0-SNAPSHOT.jar")).doesNotExist();
assertThat(mainLib.resolve("org.acme.ext-d-1.0-SNAPSHOT.jar")).doesNotExist();
}
@Test
@Order(4)
public void shouldNotFailIfConditionalDependencyIsExplicitlyDeclared()
throws IOException, URISyntaxException, InterruptedException {
final File projectDir = getProjectDir("conditional-test-project");
runGradleWrapper(projectDir, "clean", ":runner-with-explicit-import:quarkusBuild");
final File buildDir = new File(projectDir, "runner-with-explicit-import" + File.separator + "build");
final Path mainLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("main");
assertThat(mainLib.resolve("org.acme.ext-a-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-b-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-c-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-e-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-d-1.0-SNAPSHOT.jar")).doesNotExist();
}
@Test
@Order(5)
public void scenarioTwo() throws Exception {
final File projectDir = getProjectDir("conditional-test-project");
runGradleWrapper(projectDir, "clean", ":scenario-two:quarkusBuild", "-Dquarkus.package.type=mutable-jar");
final File buildDir = new File(projectDir, "scenario-two" + File.separator + "build");
final Path mainLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("main");
assertThat(mainLib.resolve("org.acme.ext-f-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-g-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-h-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-i-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-j-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-k-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-l-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-m-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-n-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-o-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-p-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-r-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-s-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-t-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-u-1.0-SNAPSHOT.jar")).exists();
final Path deploymentLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("deployment");
assertThat(deploymentLib.resolve("org.acme.ext-f-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-g-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-h-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-i-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-j-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-k-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-l-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-m-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-n-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-o-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-p-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-r-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-s-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-t-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-u-deployment-1.0-SNAPSHOT.jar")).exists();
}
} | class ConditionalDependenciesTest extends QuarkusGradleWrapperTestBase {
@Test
@Order(1)
public void publishTestExtensions() throws IOException, InterruptedException, URISyntaxException {
File dependencyProject = getProjectDir("conditional-dependencies");
runGradleWrapper(dependencyProject, ":transitive-dependency:publishToMavenLocal",
":simple-dependency:publishToMavenLocal");
runGradleWrapper(dependencyProject,
":ext-a:runtime:publishToMavenLocal",
":ext-a:deployment:publishToMavenLocal",
":ext-b:runtime:publishToMavenLocal",
":ext-b:deployment:publishToMavenLocal",
":ext-c:runtime:publishToMavenLocal",
":ext-c:deployment:publishToMavenLocal",
":ext-d:runtime:publishToMavenLocal",
":ext-d:deployment:publishToMavenLocal",
":ext-e:runtime:publishToMavenLocal",
":ext-e:deployment:publishToMavenLocal",
":ext-f:runtime:publishToMavenLocal",
":ext-f:deployment:publishToMavenLocal",
":ext-g:runtime:publishToMavenLocal",
":ext-g:deployment:publishToMavenLocal",
":ext-h:runtime:publishToMavenLocal",
":ext-h:deployment:publishToMavenLocal",
":ext-i:runtime:publishToMavenLocal",
":ext-i:deployment:publishToMavenLocal",
":ext-j:runtime:publishToMavenLocal",
":ext-j:deployment:publishToMavenLocal",
":ext-k:runtime:publishToMavenLocal",
":ext-k:deployment:publishToMavenLocal",
":ext-l:runtime:publishToMavenLocal",
":ext-l:deployment:publishToMavenLocal",
":ext-m:runtime:publishToMavenLocal",
":ext-m:deployment:publishToMavenLocal",
":ext-n:runtime:publishToMavenLocal",
":ext-n:deployment:publishToMavenLocal",
":ext-o:runtime:publishToMavenLocal",
":ext-o:deployment:publishToMavenLocal",
":ext-p:runtime:publishToMavenLocal",
":ext-p:deployment:publishToMavenLocal",
":ext-r:runtime:publishToMavenLocal",
":ext-r:deployment:publishToMavenLocal",
":ext-s:runtime:publishToMavenLocal",
":ext-s:deployment:publishToMavenLocal",
":ext-t:runtime:publishToMavenLocal",
":ext-t:deployment:publishToMavenLocal",
":ext-u:runtime:publishToMavenLocal",
":ext-u:deployment:publishToMavenLocal");
}
@Test
@Order(2)
@Test
@Order(3)
public void shouldNotImportConditionalDependency() throws IOException, URISyntaxException, InterruptedException {
final File projectDir = getProjectDir("conditional-test-project");
runGradleWrapper(projectDir, "clean", ":runner-with-exclude:quarkusBuild");
final File buildDir = new File(projectDir, "runner-with-exclude" + File.separator + "build");
final Path mainLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("main");
assertThat(mainLib.resolve("org.acme.ext-a-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-b-1.0-SNAPSHOT.jar")).doesNotExist();
assertThat(mainLib.resolve("org.acme.ext-c-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-e-1.0-SNAPSHOT.jar")).doesNotExist();
assertThat(mainLib.resolve("org.acme.ext-d-1.0-SNAPSHOT.jar")).doesNotExist();
}
@Test
@Order(4)
public void shouldNotFailIfConditionalDependencyIsExplicitlyDeclared()
throws IOException, URISyntaxException, InterruptedException {
final File projectDir = getProjectDir("conditional-test-project");
runGradleWrapper(projectDir, "clean", ":runner-with-explicit-import:quarkusBuild");
final File buildDir = new File(projectDir, "runner-with-explicit-import" + File.separator + "build");
final Path mainLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("main");
assertThat(mainLib.resolve("org.acme.ext-a-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-b-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-c-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-e-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-d-1.0-SNAPSHOT.jar")).doesNotExist();
}
@Test
@Order(5)
public void scenarioTwo() throws Exception {
final File projectDir = getProjectDir("conditional-test-project");
runGradleWrapper(projectDir, "clean", ":scenario-two:quarkusBuild", "-Dquarkus.package.type=mutable-jar");
final File buildDir = new File(projectDir, "scenario-two" + File.separator + "build");
final Path mainLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("main");
assertThat(mainLib.resolve("org.acme.ext-f-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-g-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-h-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-i-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-j-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-k-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-l-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-m-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-n-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-o-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-p-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-r-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-s-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-t-1.0-SNAPSHOT.jar")).exists();
assertThat(mainLib.resolve("org.acme.ext-u-1.0-SNAPSHOT.jar")).exists();
final Path deploymentLib = buildDir.toPath().resolve("quarkus-app").resolve("lib").resolve("deployment");
assertThat(deploymentLib.resolve("org.acme.ext-f-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-g-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-h-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-i-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-j-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-k-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-l-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-m-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-n-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-o-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-p-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-r-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-s-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-t-deployment-1.0-SNAPSHOT.jar")).exists();
assertThat(deploymentLib.resolve("org.acme.ext-u-deployment-1.0-SNAPSHOT.jar")).exists();
}
} |
why only process those node? | private boolean hasCTEConsumerUnderJoin(PhysicalPlan root, Set<CTEId> cteIds) {
if (root instanceof PhysicalCTEConsumer) {
cteIds.add(((PhysicalCTEConsumer) root).getCteId());
return true;
} else if (root.children().size() != 1) {
return false;
} else if (root instanceof PhysicalDistribute
|| root instanceof PhysicalFilter
|| root instanceof PhysicalProject) {
return hasCTEConsumerUnderJoin((PhysicalPlan) root.child(0), cteIds);
} else {
return false;
}
} | return false; | private boolean hasCTEConsumerUnderJoin(PhysicalPlan root, Set<CTEId> cteIds) {
if (root instanceof PhysicalCTEConsumer) {
cteIds.add(((PhysicalCTEConsumer) root).getCteId());
return true;
} else if (root.children().size() != 1) {
return false;
} else if (root instanceof PhysicalDistribute
|| root instanceof PhysicalFilter
|| root instanceof PhysicalProject) {
return hasCTEConsumerUnderJoin((PhysicalPlan) root.child(0), cteIds);
} else {
return false;
}
} | class RuntimeFilterGenerator extends PlanPostProcessor {
private static final ImmutableSet<JoinType> DENIED_JOIN_TYPES = ImmutableSet.of(
JoinType.LEFT_ANTI_JOIN,
JoinType.FULL_OUTER_JOIN,
JoinType.LEFT_OUTER_JOIN,
JoinType.NULL_AWARE_LEFT_ANTI_JOIN
);
private static final Set<Class<? extends PhysicalPlan>> SPJ_PLAN = ImmutableSet.of(
PhysicalOlapScan.class,
PhysicalProject.class,
PhysicalFilter.class,
PhysicalDistribute.class,
PhysicalHashJoin.class
);
private final IdGenerator<RuntimeFilterId> generator = RuntimeFilterId.createGenerator();
/**
* the runtime filter generator run at the phase of post process and plan translation of nereids planner.
* post process:
* first step: if encounter supported join type, generate nereids runtime filter for all the hash conjunctions
* and make association from exprId of the target slot references to the runtime filter. or delete the runtime
* filter whose target slot reference is one of the output slot references of the left child of the physical join as
* the runtime filter.
* second step: if encounter project, collect the association of its child and it for pushing down through
* the project node.
* plan translation:
* third step: generate nereids runtime filter target at olap scan node fragment.
* forth step: generate legacy runtime filter target and runtime filter at hash join node fragment.
* NOTICE: bottom-up travel the plan tree!!!
*/
@Override
public PhysicalPlan visitPhysicalHashJoin(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap = ctx.getAliasTransferMap();
join.right().accept(this, context);
join.left().accept(this, context);
collectPushDownCTEInfos(join, context);
if (DENIED_JOIN_TYPES.contains(join.getJoinType()) || join.isMarkJoin()) {
Set<Slot> slots = join.getOutputSet();
slots.forEach(aliasTransferMap::remove);
} else if (!getPushDownCTECandidates(ctx).isEmpty()) {
pushDownRuntimeFilterIntoCTE(ctx);
} else {
pushDownRuntimeFilterCommon(join, context);
}
return join;
}
@Override
public PhysicalCTEConsumer visitPhysicalCTEConsumer(PhysicalCTEConsumer scan, CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
scan.getOutput().forEach(slot -> ctx.getAliasTransferMap().put(slot, Pair.of(scan, slot)));
return scan;
}
@Override
public PhysicalCTEProducer visitPhysicalCTEProducer(PhysicalCTEProducer producer, CascadesContext context) {
CTEId id = producer.getCteId();
context.getRuntimeFilterContext().getCteProduceMap().put(id, producer);
return producer;
}
@Override
public PhysicalPlan visitPhysicalNestedLoopJoin(PhysicalNestedLoopJoin<? extends Plan, ? extends Plan> join,
CascadesContext context) {
join.right().accept(this, context);
join.left().accept(this, context);
if (join.getJoinType() != JoinType.LEFT_SEMI_JOIN && join.getJoinType() != JoinType.CROSS_JOIN) {
return join;
}
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap = ctx.getAliasTransferMap();
if ((ctx.getSessionVariable().getRuntimeFilterType() & TRuntimeFilterType.BITMAP.getValue()) == 0) {
return join;
}
List<Slot> leftSlots = join.left().getOutput();
List<Slot> rightSlots = join.right().getOutput();
List<Expression> bitmapRuntimeFilterConditions = JoinUtils.extractBitmapRuntimeFilterConditions(leftSlots,
rightSlots, join.getOtherJoinConjuncts());
if (!JoinUtils.extractExpressionForHashTable(leftSlots, rightSlots, join.getOtherJoinConjuncts())
.first.isEmpty()) {
return join;
}
int bitmapRFCount = bitmapRuntimeFilterConditions.size();
for (int i = 0; i < bitmapRFCount; i++) {
Expression bitmapRuntimeFilterCondition = bitmapRuntimeFilterConditions.get(i);
boolean isNot = bitmapRuntimeFilterCondition instanceof Not;
BitmapContains bitmapContains;
if (bitmapRuntimeFilterCondition instanceof Not) {
bitmapContains = (BitmapContains) bitmapRuntimeFilterCondition.child(0);
} else {
bitmapContains = (BitmapContains) bitmapRuntimeFilterCondition;
}
TRuntimeFilterType type = TRuntimeFilterType.BITMAP;
Set<Slot> targetSlots = bitmapContains.child(1).getInputSlots();
for (Slot targetSlot : targetSlots) {
if (targetSlot != null && aliasTransferMap.containsKey(targetSlot)) {
Slot olapScanSlot = aliasTransferMap.get(targetSlot).second;
RuntimeFilter filter = new RuntimeFilter(generator.getNextId(),
bitmapContains.child(0), ImmutableList.of(olapScanSlot),
ImmutableList.of(bitmapContains.child(1)), type, i, join, isNot, -1L);
ctx.addJoinToTargetMap(join, olapScanSlot.getExprId());
ctx.setTargetExprIdToFilter(olapScanSlot.getExprId(), filter);
ctx.setTargetsOnScanNode(aliasTransferMap.get(targetSlot).first.getId(),
olapScanSlot);
join.addBitmapRuntimeFilterCondition(bitmapRuntimeFilterCondition);
}
}
}
return join;
}
@Override
public PhysicalPlan visitPhysicalProject(PhysicalProject<? extends Plan> project, CascadesContext context) {
project.child().accept(this, context);
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap
= context.getRuntimeFilterContext().getAliasTransferMap();
for (Expression expression : project.getProjects()) {
if (expression.children().isEmpty()) {
continue;
}
Expression expr = ExpressionUtils.getExpressionCoveredByCast(expression.child(0));
if (expr instanceof NamedExpression && aliasTransferMap.containsKey((NamedExpression) expr)) {
if (expression instanceof Alias) {
Alias alias = ((Alias) expression);
aliasTransferMap.put(alias.toSlot(), aliasTransferMap.get(expr));
}
}
}
return project;
}
@Override
public PhysicalRelation visitPhysicalScan(PhysicalRelation scan, CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
scan.getOutput().forEach(slot -> ctx.getAliasTransferMap().put(slot, Pair.of(scan, slot)));
return scan;
}
/**
 * Estimates the NDV of the build-side expression of {@code equalTo}.
 * Returns -1 when stats are missing or unknown; otherwise at least 1.
 */
private long getBuildSideNdv(PhysicalHashJoin<? extends Plan, ? extends Plan> join, EqualTo equalTo) {
    AbstractPlan buildSide = (AbstractPlan) join.right();
    if (buildSide.getStats() == null) {
        // no statistics collected for the build side
        return -1L;
    }
    ColumnStatistic stats = equalTo.right().accept(new ExpressionEstimation(), buildSide.getStats());
    if (stats.isUnKnown) {
        return -1L;
    }
    // clamp to >= 1 so downstream sizing never sees a zero NDV
    return Math.max(1L, (long) stats.ndv);
}
/**
 * Strips any enclosing cast from {@code leftChild} and returns the underlying
 * slot, or null when the expression does not reduce to a slot.
 */
private static Slot checkTargetChild(Expression leftChild) {
    Expression stripped = ExpressionUtils.getExpressionCoveredByCast(leftChild);
    if (stripped instanceof Slot) {
        return (Slot) stripped;
    }
    return null;
}
/**
 * Generates runtime filters for every hash-join conjunct of {@code join},
 * for each filter type enabled in the session variable (bitmap excluded:
 * bitmap filters are produced on nested-loop joins only).
 */
private void pushDownRuntimeFilterCommon(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
        CascadesContext context) {
    RuntimeFilterContext ctx = context.getRuntimeFilterContext();
    // collect the filter types whose bit is set in the session mask
    int enabledMask = ctx.getSessionVariable().getRuntimeFilterType();
    List<TRuntimeFilterType> legalTypes = new ArrayList<>();
    for (TRuntimeFilterType candidate : TRuntimeFilterType.values()) {
        if ((candidate.getValue() & enabledMask) > 0) {
            legalTypes.add(candidate);
        }
    }
    // set operations on the probe side need per-child target resolution
    boolean probeIsSetOperation = join.left() instanceof PhysicalUnion
            || join.left() instanceof PhysicalIntersect
            || join.left() instanceof PhysicalExcept;
    for (int i = 0; i < join.getHashJoinConjuncts().size(); i++) {
        // normalize the conjunct so its left child belongs to the probe side
        EqualTo equalTo = (EqualTo) JoinUtils.swapEqualToForChildrenOrder(
                (EqualTo) join.getHashJoinConjuncts().get(i), join.left().getOutputSet());
        for (TRuntimeFilterType type : legalTypes) {
            if (type == TRuntimeFilterType.BITMAP) {
                continue;
            }
            if (probeIsSetOperation) {
                doPushDownIntoSetOperation(join, ctx, equalTo, type, i);
            } else {
                doPushDownBasic(join, context, ctx, equalTo, type, i);
            }
        }
    }
}
/**
 * Pushes one runtime filter down to the relation producing the probe-side slot
 * of {@code equalTo}. When the probe slot originates from a CTE consumer, the
 * CTE producer subtree is visited (once per CTE) instead of creating a filter.
 */
private void doPushDownBasic(PhysicalHashJoin<? extends Plan, ? extends Plan> join, CascadesContext context,
        RuntimeFilterContext ctx, EqualTo equalTo, TRuntimeFilterType type, int exprOrder) {
    Map<NamedExpression, Pair<PhysicalRelation, Slot>> transferMap = ctx.getAliasTransferMap();
    // the probe-side expression must reduce (modulo casts) to a traceable slot
    Slot probeSlot = checkTargetChild(equalTo.left());
    if (probeSlot == null || !transferMap.containsKey(probeSlot)) {
        return;
    }
    Pair<PhysicalRelation, Slot> origin = transferMap.get(probeSlot);
    PhysicalRelation scan = origin.first;
    Slot scanSlot = origin.second;
    Preconditions.checkState(scanSlot != null && scan != null);
    if (scan instanceof PhysicalCTEConsumer) {
        // the target lives inside a CTE: visit the producer subtree once so its
        // slots get registered, and remember the CTE has been processed
        Set<CTEId> processedCTE = context.getRuntimeFilterContext().getProcessedCTE();
        CTEId cteId = ((PhysicalCTEConsumer) scan).getCteId();
        if (!processedCTE.contains(cteId)) {
            PhysicalCTEProducer cteProducer = context.getRuntimeFilterContext()
                    .getCteProduceMap().get(cteId);
            PhysicalPlan producerInput = (PhysicalPlan) cteProducer.child(0);
            producerInput.accept(this, context);
            processedCTE.add(cteId);
        }
        return;
    }
    if (type == TRuntimeFilterType.IN_OR_BLOOM
            && ctx.getSessionVariable().enablePipelineEngine()
            && hasRemoteTarget(join, scan)) {
        // IN_OR_BLOOM is downgraded to BLOOM when the target is in another
        // fragment and the pipeline engine is on
        type = TRuntimeFilterType.BLOOM;
    }
    long buildSideNdv = getBuildSideNdv(join, equalTo);
    RuntimeFilter filter = new RuntimeFilter(generator.getNextId(),
            equalTo.right(), ImmutableList.of(scanSlot), type, exprOrder, join, buildSideNdv);
    ctx.addJoinToTargetMap(join, scanSlot.getExprId());
    ctx.setTargetExprIdToFilter(scanSlot.getExprId(), filter);
    ctx.setTargetsOnScanNode(origin.first.getId(), scanSlot);
}
/**
 * Pushes a runtime filter through a set operation (union/intersect/except) on
 * the probe side: one target slot is resolved per set-operation child, then a
 * single RuntimeFilter instance is created covering all resolved targets.
 */
private void doPushDownIntoSetOperation(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
RuntimeFilterContext ctx, EqualTo equalTo, TRuntimeFilterType type, int exprOrder) {
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap = ctx.getAliasTransferMap();
List<Slot> targetList = new ArrayList<>();
// index of the probe column inside a child's projection list; resolved once
// from the first matching child and then reused for every later child
// (assumes all set-operation children project the column at the same
// position -- TODO confirm this holds for every legal plan shape)
int projIndex = -1;
for (int j = 0; j < join.left().children().size(); j++) {
PhysicalPlan child = (PhysicalPlan) join.left().child(j);
if (child instanceof PhysicalProject) {
PhysicalProject project = (PhysicalProject) child;
Slot leftSlot = checkTargetChild(equalTo.left());
if (leftSlot == null) {
// probe side is not a plain (possibly cast) slot: nothing to target
break;
}
// locate the projected column whose name matches the probe slot
for (int k = 0; projIndex < 0 && k < project.getProjects().size(); k++) {
NamedExpression expr = (NamedExpression) project.getProjects().get(k);
if (expr.getName().equals(leftSlot.getName())) {
projIndex = k;
break;
}
}
Preconditions.checkState(projIndex >= 0
&& projIndex < project.getProjects().size());
NamedExpression targetExpr = (NamedExpression) project.getProjects().get(projIndex);
// unwrap an alias to reach the underlying slot reference
SlotReference origSlot = null;
if (targetExpr instanceof Alias) {
origSlot = (SlotReference) targetExpr.child(0);
} else {
origSlot = (SlotReference) targetExpr;
}
// NOTE(review): aliasTransferMap.get(origSlot) is not null-checked here;
// an unregistered slot would NPE -- verify upstream visitation guarantees
Slot olapScanSlot = aliasTransferMap.get(origSlot).second;
PhysicalRelation scan = aliasTransferMap.get(origSlot).first;
if (type == TRuntimeFilterType.IN_OR_BLOOM
&& ctx.getSessionVariable().enablePipelineEngine()
&& hasRemoteTarget(join, scan)) {
// IN_OR_BLOOM cannot target a remote fragment under the pipeline engine
type = TRuntimeFilterType.BLOOM;
}
targetList.add(olapScanSlot);
ctx.addJoinToTargetMap(join, olapScanSlot.getExprId());
ctx.setTargetsOnScanNode(aliasTransferMap.get(origSlot).first.getId(), olapScanSlot);
}
}
if (!targetList.isEmpty()) {
long buildSideNdv = getBuildSideNdv(join, equalTo);
// one shared filter instance registered for every resolved target slot
RuntimeFilter filter = new RuntimeFilter(generator.getNextId(),
equalTo.right(), targetList, type, exprOrder, join, buildSideNdv);
for (int j = 0; j < targetList.size(); j++) {
ctx.setTargetExprIdToFilter(targetList.get(j).getExprId(), filter);
}
}
}
/**
 * Records {@code join} as a candidate for runtime-filter push-down into a CTE
 * when exactly one of its children contains CTE consumers, then re-analyzes
 * all collected joins against their CTEs.
 */
private void collectPushDownCTEInfos(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
        CascadesContext context) {
    RuntimeFilterContext ctx = context.getRuntimeFilterContext();
    Set<CTEId> cteIds = new HashSet<>();
    PhysicalPlan leftChild = (PhysicalPlan) join.left();
    PhysicalPlan rightChild = (PhysicalPlan) join.right();
    Preconditions.checkState(leftChild != null && rightChild != null);
    // hasCTEConsumerUnderJoin also collects the CTE ids it finds into cteIds
    boolean leftHasCTE = hasCTEConsumerUnderJoin(leftChild, cteIds);
    boolean rightHasCTE = hasCTEConsumerUnderJoin(rightChild, cteIds);
    // only interesting when exactly one side references CTEs (XOR):
    // filters built from the CTE-free side can be pushed into the producer
    if (leftHasCTE != rightHasCTE) {
        for (CTEId id : cteIds) {
            // idiomatic replacement for the get-null-check-put dance
            ctx.getCteToJoinsMap().computeIfAbsent(id, k -> new HashSet<>()).add(join);
        }
    }
    if (!ctx.getCteToJoinsMap().isEmpty()) {
        analyzeRuntimeFilterPushDownIntoCTEInfos(join, context);
    }
}
/**
 * Returns the ids of CTEs that have pending runtime-filter push-down work,
 * i.e. appear in the push-down map but have not been processed yet.
 */
private List<CTEId> getPushDownCTECandidates(RuntimeFilterContext ctx) {
    List<CTEId> candidates = new ArrayList<>();
    for (PhysicalCTEProducer producer : ctx.getCteRFPushDownMap().keySet()) {
        CTEId cteId = producer.getCteId();
        if (!ctx.getPushedDownCTE().contains(cteId)) {
            candidates.add(cteId);
        }
    }
    return candidates;
}
/**
 * Decides, for each CTE collected so far, whether runtime filters from its
 * consumer-side joins can be pushed into the CTE producer. Eligible when every
 * consumer of the CTE participates in exactly one such join, each join has a
 * single slot=slot hash conjunct, every conjunct references a column of the
 * CTE on one side, and the referenced CTE column is the same across joins.
 * Eligible (producer, conjunct->join) mappings are recorded in
 * ctx.getCteRFPushDownMap() for the later push-down pass.
 */
private void analyzeRuntimeFilterPushDownIntoCTEInfos(PhysicalHashJoin<? extends Plan, ? extends Plan> curJoin,
CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
Map<CTEId, Set<PhysicalHashJoin>> cteToJoinsMap = ctx.getCteToJoinsMap();
for (Map.Entry<CTEId, Set<PhysicalHashJoin>> entry : cteToJoinsMap.entrySet()) {
CTEId cteId = entry.getKey();
Set<PhysicalHashJoin> joinSet = entry.getValue();
if (joinSet.contains(curJoin)) {
// the join currently being visited is skipped; it is analyzed later
continue;
}
Set<LogicalCTEConsumer> cteSet = context.getCteIdToConsumers().get(cteId);
Preconditions.checkState(!cteSet.isEmpty());
String cteName = cteSet.iterator().next().getName();
// every consumer of this CTE must be covered by exactly one collected join
if (joinSet.size() != cteSet.size()) {
continue;
}
List<EqualTo> equalTos = new ArrayList<>();
Map<EqualTo, PhysicalHashJoin> equalCondToJoinMap = new LinkedHashMap<>();
for (PhysicalHashJoin join : joinSet) {
// only simple joins qualify: at most one other conjunct and exactly one
// slot=slot hash conjunct; anything else aborts collection for this CTE
if (join.getOtherJoinConjuncts().size() > 1
|| join.getHashJoinConjuncts().size() != 1
|| !(join.getHashJoinConjuncts().get(0) instanceof EqualTo)
|| !(((EqualTo) join.getHashJoinConjuncts().get(0)).child(0) instanceof SlotReference)
|| !(((EqualTo) join.getHashJoinConjuncts().get(0)).child(1) instanceof SlotReference)) {
break;
} else {
EqualTo equalTo = (EqualTo) join.getHashJoinConjuncts().get(0);
equalTos.add(equalTo);
equalCondToJoinMap.put(equalTo, join);
}
}
// proceed only if every join contributed a conjunct (no early break above)
if (joinSet.size() == equalTos.size()) {
int matchNum = 0;
Set<String> cteNameSet = new HashSet<>();
Set<SlotReference> anotherSideSlotSet = new HashSet<>();
for (EqualTo equalTo : equalTos) {
SlotReference left = (SlotReference) equalTo.left();
SlotReference right = (SlotReference) equalTo.right();
// identify which side of the conjunct belongs to the CTE (by qualifier)
// and collect the opposite (non-CTE) slot plus the CTE column name
if (left.getQualifier().size() == 1 && left.getQualifier().get(0).equals(cteName)) {
matchNum += 1;
anotherSideSlotSet.add(right);
cteNameSet.add(left.getQualifiedName());
} else if (right.getQualifier().size() == 1 && right.getQualifier().get(0).equals(cteName)) {
matchNum += 1;
anotherSideSlotSet.add(left);
cteNameSet.add(right.getQualifiedName());
}
}
// all conjuncts must touch the CTE, and all must use the same CTE column
if (matchNum == equalTos.size() && cteNameSet.size() == 1) {
Preconditions.checkState(equalTos.size() == equalCondToJoinMap.size());
PhysicalCTEProducer cteProducer = context.getRuntimeFilterContext().getCteProduceMap().get(cteId);
if (anotherSideSlotSet.size() == 1) {
// single outer slot: safe to push down directly
ctx.getCteRFPushDownMap().put(cteProducer, equalCondToJoinMap);
} else {
// multiple outer slots: only safe if curJoin equates them, i.e.
// they are in the same equivalence set
List<Expression> conditions = curJoin.getHashJoinConjuncts();
boolean inSameEqualSet = false;
for (Expression e : conditions) {
if (e instanceof EqualTo) {
// NOTE(review): both sides are cast to SlotReference without an
// instanceof check; a non-slot operand here would throw CCE --
// verify curJoin's conjuncts are always slot=slot at this point
SlotReference oneSide = (SlotReference) ((EqualTo) e).left();
SlotReference anotherSide = (SlotReference) ((EqualTo) e).right();
if (anotherSideSlotSet.contains(oneSide) && anotherSideSlotSet.contains(anotherSide)) {
inSameEqualSet = true;
break;
}
}
}
if (inSameEqualSet) {
ctx.getCteRFPushDownMap().put(cteProducer, equalCondToJoinMap);
}
}
}
}
}
}
/**
 * Executes the push-down decided by analyzeRuntimeFilterPushDownIntoCTEInfos:
 * for each eligible CTE producer, a filter is generated per recorded conjunct
 * and pushed into the producer subtree. Each CTE is processed at most once.
 */
private void pushDownRuntimeFilterIntoCTE(RuntimeFilterContext ctx) {
    for (Map.Entry<PhysicalCTEProducer, Map<EqualTo, PhysicalHashJoin>> entry
            : ctx.getCteRFPushDownMap().entrySet()) {
        PhysicalCTEProducer producer = entry.getKey();
        if (ctx.getPushedDownCTE().contains(producer.getCteId())) {
            // this CTE was already handled in an earlier pass
            continue;
        }
        int exprOrder = 0;
        for (Map.Entry<EqualTo, PhysicalHashJoin> condEntry : entry.getValue().entrySet()) {
            EqualTo equalTo = condEntry.getKey();
            PhysicalHashJoin join = condEntry.getValue();
            Preconditions.checkState(producer != null && join != null);
            // under the pipeline engine only BLOOM is used here
            TRuntimeFilterType type = ctx.getSessionVariable().enablePipelineEngine()
                    ? TRuntimeFilterType.BLOOM
                    : TRuntimeFilterType.IN_OR_BLOOM;
            // normalize so the probe side ends up on the left of the conjunct
            EqualTo swapped = (EqualTo) JoinUtils.swapEqualToForChildrenOrder(
                    equalTo, join.child(0).getOutputSet());
            doPushDownIntoCTEProducerInternal(join, ctx, swapped, type, exprOrder++, producer);
        }
        ctx.getPushedDownCTE().add(producer.getCteId());
    }
}
/**
 * Pushes a runtime filter derived from {@code equalTo} through the CTE
 * producer's top projection down to the base OLAP scans inside the producer.
 * Only fires when the probe slot traces back to a CTE consumer, the producer's
 * input is a projection whose subtree is pure SPJ, and the matching projected
 * column is a plain slot reference.
 */
private void doPushDownIntoCTEProducerInternal(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
RuntimeFilterContext ctx, EqualTo equalTo, TRuntimeFilterType type, int exprOrder,
PhysicalCTEProducer cteProducer) {
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap = ctx.getAliasTransferMap();
PhysicalPlan inputPlanNode = (PhysicalPlan) cteProducer.child(0);
Preconditions.checkState(inputPlanNode != null);
// the probe side must reduce (modulo casts) to a registered slot
Slot unwrappedSlot = checkTargetChild(equalTo.left());
if (unwrappedSlot == null || !aliasTransferMap.containsKey(unwrappedSlot)) {
return;
}
Slot cteSlot = aliasTransferMap.get(unwrappedSlot).second;
PhysicalRelation cteNode = aliasTransferMap.get(unwrappedSlot).first;
long buildSideNdv = getBuildSideNdv(join, equalTo);
if (cteNode instanceof PhysicalCTEConsumer && inputPlanNode instanceof PhysicalProject) {
PhysicalProject project = (PhysicalProject) inputPlanNode;
NamedExpression targetExpr = null;
// find the projected column matching the CTE slot by name
for (Object column : project.getProjects()) {
NamedExpression alias = (NamedExpression) column;
if (cteSlot.getName().equals(alias.getName())) {
targetExpr = alias;
break;
}
}
Preconditions.checkState(targetExpr != null);
if (!(targetExpr instanceof SlotReference)) {
// computed column (e.g. alias over an expression): cannot push further
return;
} else if (!checkCanPushDownIntoBasicTable(project)) {
// producer subtree contains a non-SPJ operator: unsafe to push down
return;
} else {
Set<PhysicalOlapScan> pushDownBasicTables = getPushDownBasicTables(project, (SlotReference) targetExpr);
if (!pushDownBasicTables.isEmpty()) {
List<Slot> targetList = new ArrayList<>();
for (PhysicalOlapScan scan : pushDownBasicTables) {
// resolve the scan's own slot for the target column by name
Slot targetSlot = null;
for (Slot slot : scan.getBaseOutputs()) {
if (slot.getName().equals(targetExpr.getName())) {
targetSlot = slot;
break;
}
}
Preconditions.checkState(targetSlot != null);
targetList.add(targetSlot);
ctx.addJoinToTargetMap(join, targetSlot.getExprId());
ctx.setTargetsOnScanNode(scan.getId(), targetSlot);
}
// one shared filter instance registered for every base-table target
RuntimeFilter filter = new RuntimeFilter(generator.getNextId(),
equalTo.right(), targetList, type, exprOrder, join, buildSideNdv);
for (Slot slot : targetList) {
ctx.setTargetExprIdToFilter(slot.getExprId(), filter);
}
}
}
}
}
/**
 * Returns true when every node in the subtree rooted at {@code root} is a
 * simple SPJ operator (scan/project/filter/distribute/hash-join), i.e. the
 * subtree is safe for pushing runtime filters down to base tables.
 */
private boolean checkCanPushDownIntoBasicTable(PhysicalPlan root) {
    List<PhysicalPlan> nodes = Lists.newArrayList();
    nodes.addAll(root.collect(PhysicalPlan.class::isInstance));
    for (PhysicalPlan node : nodes) {
        if (SPJ_PLAN.stream().noneMatch(clazz -> clazz.isInstance(node))) {
            return false;
        }
    }
    return true;
}
/**
 * Collects the base OLAP scans (all of the same underlying table) that produce
 * {@code slot}, following self-join equi-conditions: starting from the unique
 * scan that outputs the slot's ExprId, every join conjunct equating two
 * same-named columns pulls in the scans producing either side's ExprId,
 * restricted to the same table name.
 */
private Set<PhysicalOlapScan> getPushDownBasicTables(PhysicalPlan root, SlotReference slot) {
    Set<PhysicalOlapScan> basicTableSet = new HashSet<>();
    Set<PhysicalHashJoin> joins = new HashSet<>();
    ExprId exprId = slot.getExprId();
    String slotName = slot.getName();
    Preconditions.checkState(exprId != null && slotName != null);
    getAllRelatedTables(root, basicTableSet);
    // seed: the unique scan that outputs the slot's ExprId
    Set<PhysicalOlapScan> targetTableSet = basicTableSet.stream().filter(e ->
            e.getBaseOutputs().stream().anyMatch(f -> f.getExprId() == exprId)).collect(Collectors.toSet());
    Preconditions.checkState(targetTableSet.size() == 1);
    String basicTableName = targetTableSet.iterator().next().getTable().getName();
    Preconditions.checkState(basicTableName != null);
    getAllJoinInfo(root, joins);
    for (PhysicalHashJoin join : joins) {
        for (Expression equalTo : join.getHashJoinConjuncts()) {
            // only slot=slot conjuncts where both sides carry the same column name
            // (self-join pattern) are followed
            if (equalTo instanceof EqualTo && ((EqualTo) equalTo).left() instanceof SlotReference
                    && ((EqualTo) equalTo).right() instanceof SlotReference
                    && ((SlotReference) ((EqualTo) equalTo).left()).getName().equals(slotName)
                    && ((SlotReference) ((EqualTo) equalTo).right()).getName().equals(slotName)) {
                ExprId leftExprId = ((SlotReference) ((EqualTo) equalTo).left()).getExprId();
                ExprId rightExprId = ((SlotReference) ((EqualTo) equalTo).right()).getExprId();
                if (leftExprId == exprId || rightExprId == exprId) {
                    // add the scans producing either side, same table only
                    targetTableSet.addAll(scansProducingExprId(basicTableSet, leftExprId, basicTableName));
                    targetTableSet.addAll(scansProducingExprId(basicTableSet, rightExprId, basicTableName));
                }
            }
        }
    }
    return targetTableSet;
}

/**
 * Helper: scans from {@code scans} that output {@code exprId} and belong to
 * the table named {@code tableName}. Extracted from the previously duplicated
 * left/right stream pipelines.
 */
private Set<PhysicalOlapScan> scansProducingExprId(Set<PhysicalOlapScan> scans, ExprId exprId,
        String tableName) {
    return scans.stream()
            .filter(s -> s.getBaseOutputs().stream().anyMatch(o -> o.getExprId() == exprId))
            .filter(s -> s.getTable().getName().equals(tableName))
            .collect(Collectors.toSet());
}
/**
 * Recursively collects every PhysicalOlapScan in the subtree into
 * {@code basicTables}. Recursion stops at a scan (scans have no children of
 * interest here).
 */
private void getAllRelatedTables(PhysicalPlan root, Set<PhysicalOlapScan> basicTables) {
    if (root instanceof PhysicalOlapScan) {
        basicTables.add((PhysicalOlapScan) root);
        return;
    }
    for (Object child : root.children()) {
        getAllRelatedTables((PhysicalPlan) child, basicTables);
    }
}
/**
 * Recursively collects hash joins into {@code joins}. Note the traversal stops
 * at the first join on each path: joins nested below another join are not
 * collected (matches the original behavior).
 */
private void getAllJoinInfo(PhysicalPlan root, Set<PhysicalHashJoin> joins) {
    if (root instanceof PhysicalHashJoin) {
        joins.add((PhysicalHashJoin) root);
        return;
    }
    for (Object child : root.children()) {
        getAllJoinInfo((PhysicalPlan) child, joins);
    }
}
/**
 * Returns true when the filter target {@code scan} is planned in a different
 * fragment than {@code join} (CTE consumers always count as remote).
 * Both nodes must already carry a FRAGMENT_ID mutable state.
 */
private boolean hasRemoteTarget(AbstractPlan join, AbstractPlan scan) {
    if (scan instanceof PhysicalCTEConsumer) {
        // CTE consumers are materialized in a separate fragment
        return true;
    }
    Preconditions.checkArgument(join.getMutableState(AbstractPlan.FRAGMENT_ID).isPresent(),
            "cannot find fragment id for Join node");
    Preconditions.checkArgument(scan.getMutableState(AbstractPlan.FRAGMENT_ID).isPresent(),
            "cannot find fragment id for scan node");
    // Fix: compare with equals() instead of reference != . The state values are
    // Objects; if they are boxed integers, reference comparison is only reliable
    // inside the Integer cache range (-128..127) and would wrongly report
    // "remote" for equal ids above it.
    return !join.getMutableState(AbstractPlan.FRAGMENT_ID).get()
            .equals(scan.getMutableState(AbstractPlan.FRAGMENT_ID).get());
}
} | class RuntimeFilterGenerator extends PlanPostProcessor {
private static final ImmutableSet<JoinType> DENIED_JOIN_TYPES = ImmutableSet.of(
JoinType.LEFT_ANTI_JOIN,
JoinType.FULL_OUTER_JOIN,
JoinType.LEFT_OUTER_JOIN,
JoinType.NULL_AWARE_LEFT_ANTI_JOIN
);
private static final Set<Class<? extends PhysicalPlan>> SPJ_PLAN = ImmutableSet.of(
PhysicalOlapScan.class,
PhysicalProject.class,
PhysicalFilter.class,
PhysicalDistribute.class,
PhysicalHashJoin.class
);
private final IdGenerator<RuntimeFilterId> generator = RuntimeFilterId.createGenerator();
/**
* the runtime filter generator run at the phase of post process and plan translation of nereids planner.
* post process:
* first step: if encounter supported join type, generate nereids runtime filter for all the hash conjunctions
* and make association from exprId of the target slot references to the runtime filter. or delete the runtime
* filter whose target slot reference is one of the output slot references of the left child of the physical join as
* the runtime filter.
* second step: if encounter project, collect the association of its child and it for pushing down through
* the project node.
* plan translation:
* third step: generate nereids runtime filter target at olap scan node fragment.
* forth step: generate legacy runtime filter target and runtime filter at hash join node fragment.
* NOTICE: bottom-up travel the plan tree!!!
*/
@Override
public PhysicalPlan visitPhysicalHashJoin(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap = ctx.getAliasTransferMap();
join.right().accept(this, context);
join.left().accept(this, context);
if (DENIED_JOIN_TYPES.contains(join.getJoinType()) || join.isMarkJoin()) {
Set<Slot> slots = join.getOutputSet();
slots.forEach(aliasTransferMap::remove);
} else {
collectPushDownCTEInfos(join, context);
if (!getPushDownCTECandidates(ctx).isEmpty()) {
pushDownRuntimeFilterIntoCTE(ctx);
} else {
pushDownRuntimeFilterCommon(join, context);
}
}
return join;
}
@Override
public PhysicalCTEConsumer visitPhysicalCTEConsumer(PhysicalCTEConsumer scan, CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
scan.getOutput().forEach(slot -> ctx.getAliasTransferMap().put(slot, Pair.of(scan, slot)));
return scan;
}
@Override
public PhysicalCTEProducer visitPhysicalCTEProducer(PhysicalCTEProducer producer, CascadesContext context) {
CTEId id = producer.getCteId();
context.getRuntimeFilterContext().getCteProduceMap().put(id, producer);
return producer;
}
@Override
public PhysicalPlan visitPhysicalNestedLoopJoin(PhysicalNestedLoopJoin<? extends Plan, ? extends Plan> join,
CascadesContext context) {
join.right().accept(this, context);
join.left().accept(this, context);
if (join.getJoinType() != JoinType.LEFT_SEMI_JOIN && join.getJoinType() != JoinType.CROSS_JOIN) {
return join;
}
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap = ctx.getAliasTransferMap();
if ((ctx.getSessionVariable().getRuntimeFilterType() & TRuntimeFilterType.BITMAP.getValue()) == 0) {
return join;
}
List<Slot> leftSlots = join.left().getOutput();
List<Slot> rightSlots = join.right().getOutput();
List<Expression> bitmapRuntimeFilterConditions = JoinUtils.extractBitmapRuntimeFilterConditions(leftSlots,
rightSlots, join.getOtherJoinConjuncts());
if (!JoinUtils.extractExpressionForHashTable(leftSlots, rightSlots, join.getOtherJoinConjuncts())
.first.isEmpty()) {
return join;
}
int bitmapRFCount = bitmapRuntimeFilterConditions.size();
for (int i = 0; i < bitmapRFCount; i++) {
Expression bitmapRuntimeFilterCondition = bitmapRuntimeFilterConditions.get(i);
boolean isNot = bitmapRuntimeFilterCondition instanceof Not;
BitmapContains bitmapContains;
if (bitmapRuntimeFilterCondition instanceof Not) {
bitmapContains = (BitmapContains) bitmapRuntimeFilterCondition.child(0);
} else {
bitmapContains = (BitmapContains) bitmapRuntimeFilterCondition;
}
TRuntimeFilterType type = TRuntimeFilterType.BITMAP;
Set<Slot> targetSlots = bitmapContains.child(1).getInputSlots();
for (Slot targetSlot : targetSlots) {
if (targetSlot != null && aliasTransferMap.containsKey(targetSlot)) {
Slot olapScanSlot = aliasTransferMap.get(targetSlot).second;
RuntimeFilter filter = new RuntimeFilter(generator.getNextId(),
bitmapContains.child(0), ImmutableList.of(olapScanSlot),
ImmutableList.of(bitmapContains.child(1)), type, i, join, isNot, -1L);
ctx.addJoinToTargetMap(join, olapScanSlot.getExprId());
ctx.setTargetExprIdToFilter(olapScanSlot.getExprId(), filter);
ctx.setTargetsOnScanNode(aliasTransferMap.get(targetSlot).first.getId(),
olapScanSlot);
join.addBitmapRuntimeFilterCondition(bitmapRuntimeFilterCondition);
}
}
}
return join;
}
@Override
public PhysicalPlan visitPhysicalProject(PhysicalProject<? extends Plan> project, CascadesContext context) {
project.child().accept(this, context);
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap
= context.getRuntimeFilterContext().getAliasTransferMap();
for (Expression expression : project.getProjects()) {
if (expression.children().isEmpty()) {
continue;
}
Expression expr = ExpressionUtils.getExpressionCoveredByCast(expression.child(0));
if (expr instanceof NamedExpression && aliasTransferMap.containsKey((NamedExpression) expr)) {
if (expression instanceof Alias) {
Alias alias = ((Alias) expression);
aliasTransferMap.put(alias.toSlot(), aliasTransferMap.get(expr));
}
}
}
return project;
}
@Override
public PhysicalRelation visitPhysicalScan(PhysicalRelation scan, CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
scan.getOutput().forEach(slot -> ctx.getAliasTransferMap().put(slot, Pair.of(scan, slot)));
return scan;
}
private long getBuildSideNdv(PhysicalHashJoin<? extends Plan, ? extends Plan> join, EqualTo equalTo) {
AbstractPlan right = (AbstractPlan) join.right();
if (right.getStats() == null) {
return -1L;
}
ExpressionEstimation estimator = new ExpressionEstimation();
ColumnStatistic buildColStats = equalTo.right().accept(estimator, right.getStats());
return buildColStats.isUnKnown ? -1 : Math.max(1, (long) buildColStats.ndv);
}
private static Slot checkTargetChild(Expression leftChild) {
Expression expression = ExpressionUtils.getExpressionCoveredByCast(leftChild);
return expression instanceof Slot ? ((Slot) expression) : null;
}
private void pushDownRuntimeFilterCommon(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
List<TRuntimeFilterType> legalTypes = Arrays.stream(TRuntimeFilterType.values())
.filter(type -> (type.getValue() & ctx.getSessionVariable().getRuntimeFilterType()) > 0)
.collect(Collectors.toList());
for (int i = 0; i < join.getHashJoinConjuncts().size(); i++) {
EqualTo equalTo = ((EqualTo) JoinUtils.swapEqualToForChildrenOrder(
(EqualTo) join.getHashJoinConjuncts().get(i), join.left().getOutputSet()));
for (TRuntimeFilterType type : legalTypes) {
if (type == TRuntimeFilterType.BITMAP) {
continue;
}
if (join.left() instanceof PhysicalUnion
|| join.left() instanceof PhysicalIntersect
|| join.left() instanceof PhysicalExcept) {
doPushDownIntoSetOperation(join, ctx, equalTo, type, i);
} else {
doPushDownBasic(join, context, ctx, equalTo, type, i);
}
}
}
}
private void doPushDownBasic(PhysicalHashJoin<? extends Plan, ? extends Plan> join, CascadesContext context,
RuntimeFilterContext ctx, EqualTo equalTo, TRuntimeFilterType type, int exprOrder) {
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap = ctx.getAliasTransferMap();
Slot unwrappedSlot = checkTargetChild(equalTo.left());
if (unwrappedSlot == null || !aliasTransferMap.containsKey(unwrappedSlot)) {
return;
}
Slot olapScanSlot = aliasTransferMap.get(unwrappedSlot).second;
PhysicalRelation scan = aliasTransferMap.get(unwrappedSlot).first;
Preconditions.checkState(olapScanSlot != null && scan != null);
if (scan instanceof PhysicalCTEConsumer) {
Set<CTEId> processedCTE = context.getRuntimeFilterContext().getProcessedCTE();
CTEId cteId = ((PhysicalCTEConsumer) scan).getCteId();
if (!processedCTE.contains(cteId)) {
PhysicalCTEProducer cteProducer = context.getRuntimeFilterContext()
.getCteProduceMap().get(cteId);
PhysicalPlan inputPlanNode = (PhysicalPlan) cteProducer.child(0);
inputPlanNode.accept(this, context);
processedCTE.add(cteId);
}
} else {
if (type == TRuntimeFilterType.IN_OR_BLOOM
&& ctx.getSessionVariable().enablePipelineEngine()
&& hasRemoteTarget(join, scan)) {
type = TRuntimeFilterType.BLOOM;
}
long buildSideNdv = getBuildSideNdv(join, equalTo);
RuntimeFilter filter = new RuntimeFilter(generator.getNextId(),
equalTo.right(), ImmutableList.of(olapScanSlot), type, exprOrder, join, buildSideNdv);
ctx.addJoinToTargetMap(join, olapScanSlot.getExprId());
ctx.setTargetExprIdToFilter(olapScanSlot.getExprId(), filter);
ctx.setTargetsOnScanNode(aliasTransferMap.get(unwrappedSlot).first.getId(), olapScanSlot);
}
}
private void doPushDownIntoSetOperation(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
RuntimeFilterContext ctx, EqualTo equalTo, TRuntimeFilterType type, int exprOrder) {
Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap = ctx.getAliasTransferMap();
List<Slot> targetList = new ArrayList<>();
int projIndex = -1;
for (int j = 0; j < join.left().children().size(); j++) {
PhysicalPlan child = (PhysicalPlan) join.left().child(j);
if (child instanceof PhysicalProject) {
PhysicalProject project = (PhysicalProject) child;
Slot leftSlot = checkTargetChild(equalTo.left());
if (leftSlot == null) {
break;
}
for (int k = 0; projIndex < 0 && k < project.getProjects().size(); k++) {
NamedExpression expr = (NamedExpression) project.getProjects().get(k);
if (expr.getName().equals(leftSlot.getName())) {
projIndex = k;
break;
}
}
Preconditions.checkState(projIndex >= 0
&& projIndex < project.getProjects().size());
NamedExpression targetExpr = (NamedExpression) project.getProjects().get(projIndex);
SlotReference origSlot = null;
if (targetExpr instanceof Alias) {
origSlot = (SlotReference) targetExpr.child(0);
} else {
origSlot = (SlotReference) targetExpr;
}
Slot olapScanSlot = aliasTransferMap.get(origSlot).second;
PhysicalRelation scan = aliasTransferMap.get(origSlot).first;
if (type == TRuntimeFilterType.IN_OR_BLOOM
&& ctx.getSessionVariable().enablePipelineEngine()
&& hasRemoteTarget(join, scan)) {
type = TRuntimeFilterType.BLOOM;
}
targetList.add(olapScanSlot);
ctx.addJoinToTargetMap(join, olapScanSlot.getExprId());
ctx.setTargetsOnScanNode(aliasTransferMap.get(origSlot).first.getId(), olapScanSlot);
}
}
if (!targetList.isEmpty()) {
long buildSideNdv = getBuildSideNdv(join, equalTo);
RuntimeFilter filter = new RuntimeFilter(generator.getNextId(),
equalTo.right(), targetList, type, exprOrder, join, buildSideNdv);
for (int j = 0; j < targetList.size(); j++) {
ctx.setTargetExprIdToFilter(targetList.get(j).getExprId(), filter);
}
}
}
private void collectPushDownCTEInfos(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
Set<CTEId> cteIds = new HashSet<>();
PhysicalPlan leftChild = (PhysicalPlan) join.left();
PhysicalPlan rightChild = (PhysicalPlan) join.right();
Preconditions.checkState(leftChild != null && rightChild != null);
boolean leftHasCTE = hasCTEConsumerUnderJoin(leftChild, cteIds);
boolean rightHasCTE = hasCTEConsumerUnderJoin(rightChild, cteIds);
if ((leftHasCTE && !rightHasCTE) || (!leftHasCTE && rightHasCTE)) {
for (CTEId id : cteIds) {
if (ctx.getCteToJoinsMap().get(id) == null) {
Set<PhysicalHashJoin> newJoin = new HashSet<>();
newJoin.add(join);
ctx.getCteToJoinsMap().put(id, newJoin);
} else {
ctx.getCteToJoinsMap().get(id).add(join);
}
}
}
if (!ctx.getCteToJoinsMap().isEmpty()) {
analyzeRuntimeFilterPushDownIntoCTEInfos(join, context);
}
}
private List<CTEId> getPushDownCTECandidates(RuntimeFilterContext ctx) {
List<CTEId> candidates = new ArrayList<>();
Map<PhysicalCTEProducer, Map<EqualTo, PhysicalHashJoin>> cteRFPushDownMap = ctx.getCteRFPushDownMap();
for (Map.Entry<PhysicalCTEProducer, Map<EqualTo, PhysicalHashJoin>> entry : cteRFPushDownMap.entrySet()) {
CTEId cteId = entry.getKey().getCteId();
if (ctx.getPushedDownCTE().contains(cteId)) {
continue;
}
candidates.add(cteId);
}
return candidates;
}
private void analyzeRuntimeFilterPushDownIntoCTEInfos(PhysicalHashJoin<? extends Plan, ? extends Plan> curJoin,
CascadesContext context) {
RuntimeFilterContext ctx = context.getRuntimeFilterContext();
Map<CTEId, Set<PhysicalHashJoin>> cteToJoinsMap = ctx.getCteToJoinsMap();
for (Map.Entry<CTEId, Set<PhysicalHashJoin>> entry : cteToJoinsMap.entrySet()) {
CTEId cteId = entry.getKey();
Set<PhysicalHashJoin> joinSet = entry.getValue();
if (joinSet.contains(curJoin)) {
continue;
}
Set<LogicalCTEConsumer> cteSet = context.getCteIdToConsumers().get(cteId);
Preconditions.checkState(!cteSet.isEmpty());
String cteName = cteSet.iterator().next().getName();
if (joinSet.size() != cteSet.size()) {
continue;
}
List<EqualTo> equalTos = new ArrayList<>();
Map<EqualTo, PhysicalHashJoin> equalCondToJoinMap = new LinkedHashMap<>();
for (PhysicalHashJoin join : joinSet) {
if (join.getOtherJoinConjuncts().size() > 1
|| join.getHashJoinConjuncts().size() != 1
|| !(join.getHashJoinConjuncts().get(0) instanceof EqualTo)) {
break;
} else {
EqualTo equalTo = (EqualTo) join.getHashJoinConjuncts().get(0);
equalTos.add(equalTo);
equalCondToJoinMap.put(equalTo, join);
}
}
if (joinSet.size() == equalTos.size()) {
int matchNum = 0;
Set<String> cteNameSet = new HashSet<>();
Set<SlotReference> anotherSideSlotSet = new HashSet<>();
for (EqualTo equalTo : equalTos) {
SlotReference left = (SlotReference) equalTo.left();
SlotReference right = (SlotReference) equalTo.right();
if (left.getQualifier().size() == 1 && left.getQualifier().get(0).equals(cteName)) {
matchNum += 1;
anotherSideSlotSet.add(right);
cteNameSet.add(left.getQualifiedName());
} else if (right.getQualifier().size() == 1 && right.getQualifier().get(0).equals(cteName)) {
matchNum += 1;
anotherSideSlotSet.add(left);
cteNameSet.add(right.getQualifiedName());
}
}
if (matchNum == equalTos.size() && cteNameSet.size() == 1) {
Preconditions.checkState(equalTos.size() == equalCondToJoinMap.size(),
"equalTos.size() != equalCondToJoinMap.size()");
PhysicalCTEProducer cteProducer = context.getRuntimeFilterContext().getCteProduceMap().get(cteId);
if (anotherSideSlotSet.size() == 1) {
ctx.getCteRFPushDownMap().put(cteProducer, equalCondToJoinMap);
} else {
List<Expression> conditions = curJoin.getHashJoinConjuncts();
boolean inSameEqualSet = false;
for (Expression e : conditions) {
if (e instanceof EqualTo) {
SlotReference oneSide = (SlotReference) ((EqualTo) e).left();
SlotReference anotherSide = (SlotReference) ((EqualTo) e).right();
if (anotherSideSlotSet.contains(oneSide) && anotherSideSlotSet.contains(anotherSide)) {
inSameEqualSet = true;
break;
}
}
}
if (inSameEqualSet) {
ctx.getCteRFPushDownMap().put(cteProducer, equalCondToJoinMap);
}
}
}
}
}
}
/**
 * Pushes every runtime filter recorded in {@code ctx.getCteRFPushDownMap()} down into the
 * corresponding CTE producer. Each producer is handled at most once: its CTE id is added to
 * {@code ctx.getPushedDownCTE()} afterwards, so already-processed producers are skipped.
 */
private void pushDownRuntimeFilterIntoCTE(RuntimeFilterContext ctx) {
    for (Map.Entry<PhysicalCTEProducer, Map<EqualTo, PhysicalHashJoin>> producerEntry
            : ctx.getCteRFPushDownMap().entrySet()) {
        PhysicalCTEProducer producer = producerEntry.getKey();
        Preconditions.checkState(producer != null);
        // Each CTE producer only needs to be processed once.
        if (ctx.getPushedDownCTE().contains(producer.getCteId())) {
            continue;
        }
        int order = 0;
        for (Map.Entry<EqualTo, PhysicalHashJoin> condEntry : producerEntry.getValue().entrySet()) {
            PhysicalHashJoin hashJoin = condEntry.getValue();
            Preconditions.checkState(hashJoin != null);
            // When the pipeline engine is enabled only BLOOM filters are generated here;
            // otherwise IN_OR_BLOOM is used.
            TRuntimeFilterType filterType = ctx.getSessionVariable().enablePipelineEngine()
                    ? TRuntimeFilterType.BLOOM
                    : TRuntimeFilterType.IN_OR_BLOOM;
            // Normalize the condition so its sides match the join's child order.
            EqualTo normalized = (EqualTo) JoinUtils.swapEqualToForChildrenOrder(
                    condEntry.getKey(), hashJoin.child(0).getOutputSet());
            doPushDownIntoCTEProducerInternal(hashJoin, ctx, normalized, filterType, order++, producer);
        }
        ctx.getPushedDownCTE().add(producer.getCteId());
    }
}
/**
 * Attempts to push a single runtime filter condition into {@code cteProducer}.
 * <p>
 * The push-down only happens when the probe side of {@code equalTo} resolves (via the alias
 * transfer map) to a {@code PhysicalCTEConsumer} and the producer's child is a
 * {@code PhysicalProject} whose whole subtree passes {@code checkCanPushDownIntoBasicTable}.
 * In that case one {@code RuntimeFilter} is created and registered for every base OLAP scan
 * slot that is join-equivalent to the projected target slot.
 *
 * @param join        the hash join whose right side supplies the filter source expression
 * @param ctx         runtime filter bookkeeping (join targets, scan-node targets, expr-id map)
 * @param equalTo     the join condition; its left side is the probe/target expression
 * @param type        the runtime filter type to generate
 * @param exprOrder   position of this condition within the join's conditions
 * @param cteProducer the CTE producer to push the filter into
 */
private void doPushDownIntoCTEProducerInternal(PhysicalHashJoin<? extends Plan, ? extends Plan> join,
        RuntimeFilterContext ctx, EqualTo equalTo, TRuntimeFilterType type, int exprOrder,
        PhysicalCTEProducer cteProducer) {
    Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap = ctx.getAliasTransferMap();
    PhysicalPlan inputPlanNode = (PhysicalPlan) cteProducer.child(0);
    Slot unwrappedSlot = checkTargetChild(equalTo.left());
    // Nothing to do when the target expression is not a plain slot or its origin is unknown.
    if (unwrappedSlot == null || !aliasTransferMap.containsKey(unwrappedSlot)) {
        return;
    }
    Slot cteSlot = aliasTransferMap.get(unwrappedSlot).second;
    PhysicalRelation cteNode = aliasTransferMap.get(unwrappedSlot).first;
    long buildSideNdv = getBuildSideNdv(join, equalTo);
    if (cteNode instanceof PhysicalCTEConsumer && inputPlanNode instanceof PhysicalProject) {
        PhysicalProject project = (PhysicalProject) inputPlanNode;
        NamedExpression targetExpr = null;
        // Locate the producer-side projection column that matches the consumer-side slot by name.
        for (Object column : project.getProjects()) {
            NamedExpression alias = (NamedExpression) column;
            if (cteSlot.getName().equals(alias.getName())) {
                targetExpr = alias;
                break;
            }
        }
        Preconditions.checkState(targetExpr != null);
        if (!(targetExpr instanceof SlotReference)) {
            // Only plain slots can be filter targets; computed expressions are skipped.
            return;
        } else if (!checkCanPushDownIntoBasicTable(project)) {
            return;
        } else {
            Map<Slot, PhysicalOlapScan> pushDownBasicTableInfos = getPushDownBasicTablesInfos(project,
                    (SlotReference) targetExpr, aliasTransferMap);
            if (!pushDownBasicTableInfos.isEmpty()) {
                List<Slot> targetList = new ArrayList<>();
                for (Map.Entry<Slot, PhysicalOlapScan> entry : pushDownBasicTableInfos.entrySet()) {
                    Slot targetSlot = entry.getKey();
                    PhysicalOlapScan scan = entry.getValue();
                    targetList.add(targetSlot);
                    ctx.addJoinToTargetMap(join, targetSlot.getExprId());
                    ctx.setTargetsOnScanNode(scan.getId(), targetSlot);
                }
                // A single filter instance is shared by all equivalent target slots.
                RuntimeFilter filter = new RuntimeFilter(generator.getNextId(),
                        equalTo.right(), targetList, type, exprOrder, join, buildSideNdv);
                for (Slot slot : targetList) {
                    ctx.setTargetExprIdToFilter(slot.getExprId(), filter);
                }
            }
        }
    }
}
/**
 * Returns true when every node in {@code root}'s subtree is an instance of one of the plan
 * classes listed in {@code SPJ_PLAN}, i.e. the subtree is simple enough for the runtime
 * filter to be pushed through it into the underlying base tables.
 */
private boolean checkCanPushDownIntoBasicTable(PhysicalPlan root) {
    for (Object node : root.collect(PhysicalPlan.class::isInstance)) {
        boolean allowed = SPJ_PLAN.stream().anyMatch(clazz -> clazz.isInstance(node));
        if (!allowed) {
            return false;
        }
    }
    return true;
}
/**
 * Finds the base OLAP scans inside {@code root} whose slots are join-equivalent to
 * {@code slot}, so a runtime filter targeting {@code slot} can also be applied to them.
 *
 * @param root the plan subtree (inside the CTE producer) to search for hash joins
 * @param slot the slot the runtime filter targets
 * @param aliasTransferMap maps a named expression to its originating relation and slot
 * @return map from each equivalent slot to the OLAP scan that produces it; may be empty
 */
private Map<Slot, PhysicalOlapScan> getPushDownBasicTablesInfos(PhysicalPlan root, SlotReference slot,
        Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap) {
    Map<Slot, PhysicalOlapScan> basicTableInfos = new HashMap<>();
    Set<PhysicalHashJoin> joins = new HashSet<>();
    ExprId exprId = slot.getExprId();
    putOlapScanIfPresent(basicTableInfos, slot, aliasTransferMap);
    getAllJoinInfo(root, joins);
    for (PhysicalHashJoin join : joins) {
        List<Expression> conditions = join.getHashJoinConjuncts();
        for (Expression equalTo : conditions) {
            if (!(equalTo instanceof EqualTo)) {
                continue;
            }
            // Guard the casts: hash join conjuncts are not guaranteed to compare two plain slots.
            Expression left = ((EqualTo) equalTo).left();
            Expression right = ((EqualTo) equalTo).right();
            if (!(left instanceof SlotReference) || !(right instanceof SlotReference)) {
                continue;
            }
            SlotReference leftSlot = (SlotReference) left;
            SlotReference rightSlot = (SlotReference) right;
            if (leftSlot.getExprId() == exprId) {
                putOlapScanIfPresent(basicTableInfos, rightSlot, aliasTransferMap);
            } else if (rightSlot.getExprId() == exprId) {
                putOlapScanIfPresent(basicTableInfos, leftSlot, aliasTransferMap);
            }
        }
    }
    return basicTableInfos;
}

/**
 * Adds {@code slot -> scan} when the alias map knows the slot and its source relation is an
 * OLAP scan. The previous code dereferenced and cast the lookup result unconditionally, which
 * threw NPE for slots missing from the map and ClassCastException for non-OLAP relations
 * (e.g. CTE consumers).
 */
private static void putOlapScanIfPresent(Map<Slot, PhysicalOlapScan> basicTableInfos, Slot slot,
        Map<NamedExpression, Pair<PhysicalRelation, Slot>> aliasTransferMap) {
    Pair<PhysicalRelation, Slot> source = aliasTransferMap.get(slot);
    if (source != null && source.first instanceof PhysicalOlapScan) {
        basicTableInfos.put(slot, (PhysicalOlapScan) source.first);
    }
}
/**
 * Collects the hash joins reachable from {@code root}. The walk stops at the first hash join
 * on each path — joins nested beneath another join are not visited
 * (NOTE(review): confirm this shallow traversal is intended).
 */
private void getAllJoinInfo(PhysicalPlan root, Set<PhysicalHashJoin> joins) {
    if (root instanceof PhysicalHashJoin) {
        joins.add((PhysicalHashJoin) root);
        return;
    }
    for (Object child : root.children()) {
        getAllJoinInfo((PhysicalPlan) child, joins);
    }
}
/**
 * Returns true when the runtime filter target on {@code scan} lives in a different plan
 * fragment than {@code join}. CTE consumers are always treated as remote.
 *
 * @throws IllegalArgumentException if either node has no fragment id assigned yet
 */
private boolean hasRemoteTarget(AbstractPlan join, AbstractPlan scan) {
    if (scan instanceof PhysicalCTEConsumer) {
        return true;
    }
    Preconditions.checkArgument(join.getMutableState(AbstractPlan.FRAGMENT_ID).isPresent(),
            "cannot find fragment id for Join node");
    Preconditions.checkArgument(scan.getMutableState(AbstractPlan.FRAGMENT_ID).isPresent(),
            "cannot find fragment id for scan node");
    // Compare with equals() instead of '!=': the fragment ids are boxed objects, so reference
    // inequality could report two equal ids as different and wrongly mark a local target remote.
    return !join.getMutableState(AbstractPlan.FRAGMENT_ID).get()
            .equals(scan.getMutableState(AbstractPlan.FRAGMENT_ID).get());
}
} |
What's that? BlackHole? | public static void collectMaterializedViewMetrics(MetricVisitor visitor, boolean minifyMetrics) {
MaterializedViewMetricsRegistry instance = MaterializedViewMetricsRegistry.getInstance();
for (Map.Entry<MvId, MaterializedViewMetricsEntity> e : instance.idToMVMetrics.entrySet()) {
IMaterializedViewMetricsEntity mvEntity = e.getValue();
if (mvEntity == null || mvEntity instanceof MaterializedViewMetricsBlackHoleEntity) {
continue;
}
MvId mvId = e.getKey();
MaterializedViewMetricsEntity entity = (MaterializedViewMetricsEntity) mvEntity;
for (Metric m : entity.getMetrics()) {
if (minifyMetrics) {
if (null == m.getValue()) {
continue;
}
if (Metric.MetricType.GAUGE == m.type) {
continue;
}
if (Metric.MetricType.COUNTER == m.type && ((Long) m.getValue()).longValue() == 0L) {
continue;
}
}
m.addLabel(new MetricLabel("db_name", entity.dbName))
.addLabel(new MetricLabel("mv_name", entity.mvName))
.addLabel(new MetricLabel("mv_id", String.valueOf(mvId.getId())));
visitor.visit(m);
}
}
if (!minifyMetrics) {
for (Map.Entry<String, Histogram> e : MaterializedViewMetricsRegistry.getInstance()
.metricRegistry.getHistograms().entrySet()) {
visitor.visitHistogram(e.getKey(), e.getValue());
}
}
} | if (mvEntity == null || mvEntity instanceof MaterializedViewMetricsBlackHoleEntity) { | public static void collectMaterializedViewMetrics(MetricVisitor visitor, boolean minifyMetrics) {
MaterializedViewMetricsRegistry instance = MaterializedViewMetricsRegistry.getInstance();
for (Map.Entry<MvId, MaterializedViewMetricsEntity> e : instance.idToMVMetrics.entrySet()) {
IMaterializedViewMetricsEntity mvEntity = e.getValue();
if (mvEntity == null || mvEntity instanceof MaterializedViewMetricsBlackHoleEntity) {
continue;
}
MvId mvId = e.getKey();
MaterializedViewMetricsEntity entity = (MaterializedViewMetricsEntity) mvEntity;
for (Metric m : entity.getMetrics()) {
if (minifyMetrics) {
if (null == m.getValue()) {
continue;
}
if (Metric.MetricType.GAUGE == m.type) {
continue;
}
if (Metric.MetricType.COUNTER == m.type && ((Long) m.getValue()).longValue() == 0L) {
continue;
}
}
m.addLabel(new MetricLabel("db_name", entity.dbName))
.addLabel(new MetricLabel("mv_name", entity.mvName))
.addLabel(new MetricLabel("mv_id", String.valueOf(mvId.getId())));
visitor.visit(m);
}
}
if (!minifyMetrics) {
for (Map.Entry<String, Histogram> e : MaterializedViewMetricsRegistry.getInstance()
.metricRegistry.getHistograms().entrySet()) {
visitor.visitHistogram(e.getKey(), e.getValue());
}
}
} | class MetricsCleaner extends TimerTask {
@Override
public void run() {
synchronized (MaterializedViewMetricsRegistry.this) {
idToMVMetrics.clear();
}
}
} | class MetricsCleaner extends TimerTask {
@Override
public void run() {
synchronized (MaterializedViewMetricsRegistry.this) {
idToMVMetrics.clear();
}
}
} |
It seems that the columns of simple range/in/eq predicates can be a prefix of the rollup's columns. | private int calcSortScore(MaterializedView mv, Set<String> equivalenceColumns, Set<String> nonEquivalenceColumns) {
List<Column> schema = mv.getBaseSchema();
int score = 0;
for (Column col : schema) {
String columName = col.getName().toLowerCase();
if (equivalenceColumns.contains(columName)) {
score++;
} else if (nonEquivalenceColumns.contains(columName)) {
score++;
break;
} else {
break;
}
}
return score;
} | private int calcSortScore(MaterializedView mv, Set<String> equivalenceColumns, Set<String> nonEquivalenceColumns) {
List<Column> keyColumns = mv.getKeyColumnsByIndexId(mv.getBaseIndexId());
int score = 0;
for (Column col : keyColumns) {
String columName = col.getName().toLowerCase();
if (equivalenceColumns.contains(columName)) {
score++;
} else if (nonEquivalenceColumns.contains(columName)) {
score++;
break;
} else {
break;
}
}
return score;
} | class CandidateContextComparator implements Comparator<CandidateContext> {
@Override
public int compare(CandidateContext context1, CandidateContext context2) {
int ret = Integer.compare(context1.getGroupbyColumnNum(), context2.getGroupbyColumnNum());
if (ret != 0) {
return ret;
}
ret = Double.compare(context1.getMvStatistics().getOutputRowCount(),
context2.getMvStatistics().getOutputRowCount());
if (ret != 0) {
return ret;
}
ret = Integer.compare(context2.sortScore, context1.sortScore);
if (ret != 0) {
return ret;
}
ret = Integer.compare(context1.getSchemaColumnNum(), context2.getSchemaColumnNum());
if (ret != 0) {
return ret;
}
ret = Double.compare(context1.getMvStatistics().getComputeSize(), context2.getMvStatistics().getComputeSize());
return ret != 0 ? ret : Integer.compare(context1.getIndex(), context2.getIndex());
}
} | class CandidateContextComparator implements Comparator<CandidateContext> {
@Override
public int compare(CandidateContext context1, CandidateContext context2) {
int ret = Integer.compare(context1.getGroupbyColumnNum(), context2.getGroupbyColumnNum());
if (ret != 0) {
return ret;
}
ret = Integer.compare(context2.sortScore, context1.sortScore);
if (ret != 0) {
return ret;
}
ret = Double.compare(context1.getMvStatistics().getOutputRowCount(),
context2.getMvStatistics().getOutputRowCount());
if (ret != 0) {
return ret;
}
ret = Integer.compare(context1.getSchemaColumnNum(), context2.getSchemaColumnNum());
if (ret != 0) {
return ret;
}
ret = Double.compare(context1.getMvStatistics().getComputeSize(), context2.getMvStatistics().getComputeSize());
return ret != 0 ? ret : Integer.compare(context1.getIndex(), context2.getIndex());
}
} |
|
Could we set `rowEvents = new ArrayList<>();` on CommitTXEvent, but wait next BeginTXEvent | private void processEventWithTX(final AbstractWALEvent event) {
if (event instanceof BeginTXEvent) {
rowEvents = new ArrayList<>();
return;
}
if (event instanceof AbstractRowEvent) {
rowEvents.add((AbstractRowEvent) event);
return;
}
if (event instanceof CommitTXEvent) {
List<Record> records = new LinkedList<>();
for (AbstractWALEvent each : rowEvents) {
records.add(walEventConverter.convert(each));
}
records.add(walEventConverter.convert(event));
channel.pushRecords(records);
}
} | } | private void processEventWithTX(final AbstractWALEvent event) {
if (event instanceof BeginTXEvent) {
rowEvents = new ArrayList<>();
return;
}
if (event instanceof AbstractRowEvent) {
rowEvents.add((AbstractRowEvent) event);
return;
}
if (event instanceof CommitTXEvent) {
List<Record> records = new LinkedList<>();
for (AbstractWALEvent each : rowEvents) {
records.add(walEventConverter.convert(each));
}
records.add(walEventConverter.convert(event));
channel.pushRecords(records);
}
} | class PostgreSQLWALDumper extends AbstractLifecycleExecutor implements IncrementalDumper {
private final DumperConfiguration dumperConfig;
private final WALPosition walPosition;
private final PipelineChannel channel;
private final WALEventConverter walEventConverter;
private final PostgreSQLLogicalReplication logicalReplication;
private final boolean decodeWithTX;
private List<AbstractRowEvent> rowEvents = new LinkedList<>();
/**
 * Creates a WAL dumper that streams PostgreSQL logical-replication events into the channel.
 *
 * <p>Only {@code StandardPipelineDataSourceConfiguration} is accepted; any other data source
 * configuration fails fast with an {@code UnsupportedSQLOperationException}.
 *
 * @param dumperConfig dumper configuration (data source, job id, decode mode)
 * @param position resume position; must be a {@code WALPosition}
 * @param channel channel that receives the converted records
 * @param metaDataLoader loader used by the WAL-event-to-record converter
 */
public PostgreSQLWALDumper(final DumperConfiguration dumperConfig, final IngestPosition position,
        final PipelineChannel channel, final PipelineTableMetaDataLoader metaDataLoader) {
    ShardingSpherePreconditions.checkState(StandardPipelineDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfig().getClass()),
            () -> new UnsupportedSQLOperationException("PostgreSQLWALDumper only support PipelineDataSourceConfiguration"));
    this.dumperConfig = dumperConfig;
    // Narrowing cast: a non-WAL position would fail here with a ClassCastException.
    walPosition = (WALPosition) position;
    this.channel = channel;
    walEventConverter = new WALEventConverter(dumperConfig, metaDataLoader);
    logicalReplication = new PostgreSQLLogicalReplication();
    this.decodeWithTX = dumperConfig.isDecodeWithTX();
}
/**
 * Main dump loop: opens a logical-replication connection and stream at the stored WAL
 * position, decodes each pending message with the test_decoding plugin, and dispatches the
 * event either transaction-batched ({@code decodeWithTX}) or one event at a time.
 *
 * <p>Backs off 10ms when no message is pending; loops until {@code isRunning()} turns false.
 * SQL failures are rethrown as {@code IngestException}.
 */
@SneakyThrows(InterruptedException.class)
@Override
protected void runBlocking() {
    try (
            Connection connection = logicalReplication.createConnection((StandardPipelineDataSourceConfiguration) dumperConfig.getDataSourceConfig());
            PGReplicationStream stream = logicalReplication.createReplicationStream(connection, PostgreSQLPositionInitializer.getUniqueSlotName(connection, dumperConfig.getJobId()),
                    walPosition.getLogSequenceNumber())) {
        PostgreSQLTimestampUtils utils = new PostgreSQLTimestampUtils(connection.unwrap(PgConnection.class).getTimestampUtils());
        DecodingPlugin decodingPlugin = new TestDecodingPlugin(utils);
        while (isRunning()) {
            ByteBuffer message = stream.readPending();
            if (null == message) {
                // readPending is non-blocking; sleep briefly instead of busy-spinning.
                Thread.sleep(10L);
                continue;
            }
            AbstractWALEvent event = decodingPlugin.decode(message, new PostgreSQLLogSequenceNumber(stream.getLastReceiveLSN()));
            if (decodeWithTX) {
                processEventWithTX(event);
            } else {
                processEventIgnoreTX(event);
            }
        }
    } catch (final SQLException ex) {
        throw new IngestException(ex);
    }
}
/**
 * Pushes a single event to the channel without transaction batching: BEGIN markers are
 * dropped, every other event is converted and emitted on its own.
 */
private void processEventIgnoreTX(final AbstractWALEvent event) {
    // Transaction begin markers carry no row data in this mode.
    if (!(event instanceof BeginTXEvent)) {
        Record record = walEventConverter.convert(event);
        channel.pushRecords(Collections.singletonList(record));
    }
}
@Override
protected void doStop() {
    // No explicit teardown: the replication stream and connection are closed by the
    // try-with-resources in runBlocking() once isRunning() turns false.
}
} | class PostgreSQLWALDumper extends AbstractLifecycleExecutor implements IncrementalDumper {
private final DumperConfiguration dumperConfig;
private final WALPosition walPosition;
private final PipelineChannel channel;
private final WALEventConverter walEventConverter;
private final PostgreSQLLogicalReplication logicalReplication;
private final boolean decodeWithTX;
private List<AbstractRowEvent> rowEvents = new LinkedList<>();
public PostgreSQLWALDumper(final DumperConfiguration dumperConfig, final IngestPosition position,
final PipelineChannel channel, final PipelineTableMetaDataLoader metaDataLoader) {
ShardingSpherePreconditions.checkState(StandardPipelineDataSourceConfiguration.class.equals(dumperConfig.getDataSourceConfig().getClass()),
() -> new UnsupportedSQLOperationException("PostgreSQLWALDumper only support PipelineDataSourceConfiguration"));
this.dumperConfig = dumperConfig;
walPosition = (WALPosition) position;
this.channel = channel;
walEventConverter = new WALEventConverter(dumperConfig, metaDataLoader);
logicalReplication = new PostgreSQLLogicalReplication();
this.decodeWithTX = dumperConfig.isDecodeWithTX();
}
@SneakyThrows(InterruptedException.class)
@Override
protected void runBlocking() {
try (
Connection connection = logicalReplication.createConnection((StandardPipelineDataSourceConfiguration) dumperConfig.getDataSourceConfig());
PGReplicationStream stream = logicalReplication.createReplicationStream(connection, PostgreSQLPositionInitializer.getUniqueSlotName(connection, dumperConfig.getJobId()),
walPosition.getLogSequenceNumber())) {
PostgreSQLTimestampUtils utils = new PostgreSQLTimestampUtils(connection.unwrap(PgConnection.class).getTimestampUtils());
DecodingPlugin decodingPlugin = new TestDecodingPlugin(utils);
while (isRunning()) {
ByteBuffer message = stream.readPending();
if (null == message) {
Thread.sleep(10L);
continue;
}
AbstractWALEvent event = decodingPlugin.decode(message, new PostgreSQLLogSequenceNumber(stream.getLastReceiveLSN()));
if (decodeWithTX) {
processEventWithTX(event);
} else {
processEventIgnoreTX(event);
}
}
} catch (final SQLException ex) {
throw new IngestException(ex);
}
}
/**
 * Pushes a single event to the channel, ignoring transaction boundaries: BEGIN markers are
 * dropped, every other event (rows and commits alike) is converted and emitted on its own.
 */
private void processEventIgnoreTX(final AbstractWALEvent event) {
    if (event instanceof BeginTXEvent) {
        return;
    }
    channel.pushRecords(Collections.singletonList(walEventConverter.convert(event)));
}
@Override
protected void doStop() {
}
} |
Is this change necessary? Since the hashed operator id is the ground truth and always exists. | public CheckpointMetadata map(CheckpointMetadata value) throws Exception {
final List<OperatorState> mapped =
value.getOperatorStates().stream()
.map(
operatorState -> {
OperatorIdentifier operatorIdentifier;
if (operatorState.getOperatorUid().isPresent()) {
operatorIdentifier =
OperatorIdentifier.forUid(
operatorState.getOperatorUid().get());
} else {
operatorIdentifier =
OperatorIdentifier.forUidHash(
operatorState
.getOperatorID()
.toHexString());
}
final OperatorIdentifier transformedIdentifier =
uidTransformationMap.remove(operatorIdentifier);
if (transformedIdentifier != null) {
return operatorState.copyWithNewIDs(
transformedIdentifier.getUid().orElse(null),
transformedIdentifier.getOperatorId());
}
return operatorState;
})
.collect(Collectors.toList());
return new CheckpointMetadata(value.getCheckpointId(), mapped, value.getMasterStates());
} | } | public CheckpointMetadata map(CheckpointMetadata value) throws Exception {
final List<OperatorState> mapped =
value.getOperatorStates().stream()
.map(
operatorState -> {
OperatorIdentifier operatorIdentifier;
if (operatorState.getOperatorUid().isPresent()) {
operatorIdentifier =
OperatorIdentifier.forUid(
operatorState.getOperatorUid().get());
} else {
operatorIdentifier =
OperatorIdentifier.forUidHash(
operatorState
.getOperatorID()
.toHexString());
}
final OperatorIdentifier transformedIdentifier =
uidTransformationMap.remove(operatorIdentifier);
if (transformedIdentifier != null) {
return operatorState.copyWithNewIDs(
transformedIdentifier.getUid().orElse(null),
transformedIdentifier.getOperatorId());
}
return operatorState;
})
.collect(Collectors.toList());
return new CheckpointMetadata(value.getCheckpointId(), mapped, value.getMasterStates());
} | class CheckpointMetadataCheckpointMetadataMapFunction
extends RichMapFunction<CheckpointMetadata, CheckpointMetadata> {
private static final long serialVersionUID = 1L;
private final Map<OperatorIdentifier, OperatorIdentifier> uidTransformationMap;
public CheckpointMetadataCheckpointMetadataMapFunction(
Map<OperatorIdentifier, OperatorIdentifier> uidTransformationMap) {
this.uidTransformationMap = new HashMap<>(uidTransformationMap);
}
@Override
@Override
public void close() throws Exception {
if (!uidTransformationMap.isEmpty()) {
throw new FlinkRuntimeException(
"Some identifier changes were never applied!"
+ uidTransformationMap.entrySet().stream()
.map(Map.Entry::toString)
.collect(Collectors.joining("\n\t", "\n\t", "")));
}
}
} | class CheckpointMetadataCheckpointMetadataMapFunction
extends RichMapFunction<CheckpointMetadata, CheckpointMetadata> {
private static final long serialVersionUID = 1L;
private final Map<OperatorIdentifier, OperatorIdentifier> uidTransformationMap;
public CheckpointMetadataCheckpointMetadataMapFunction(
Map<OperatorIdentifier, OperatorIdentifier> uidTransformationMap) {
this.uidTransformationMap = new HashMap<>(uidTransformationMap);
}
@Override
@Override
public void close() throws Exception {
if (!uidTransformationMap.isEmpty()) {
throw new FlinkRuntimeException(
"Some identifier changes were never applied!"
+ uidTransformationMap.entrySet().stream()
.map(Map.Entry::toString)
.collect(Collectors.joining("\n\t", "\n\t", "")));
}
}
} |
I believe when `query` is null or empty this should continue clearing the query | public UrlBuilder setQuery(String query) {
if (query != null && !query.isEmpty()) {
with(query, UrlTokenizerState.QUERY);
}
return this;
} | if (query != null && !query.isEmpty()) { | public UrlBuilder setQuery(String query) {
if (query == null || query.isEmpty()) {
this.query.clear();
} else {
with(query, UrlTokenizerState.QUERY);
}
return this;
} | class UrlBuilder {
private static final Map<String, UrlBuilder> PARSED_URLS = new ConcurrentHashMap<>();
private String scheme;
private String host;
private String port;
private String path;
private final Map<String, List<String>> query = new LinkedHashMap<>();
/**
* Set the scheme/protocol that will be used to build the final URL.
*
* @param scheme The scheme/protocol that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setScheme(String scheme) {
if (scheme == null || scheme.isEmpty()) {
this.scheme = null;
} else {
with(scheme, UrlTokenizerState.SCHEME);
}
return this;
}
/**
* Get the scheme/protocol that has been assigned to this UrlBuilder.
*
* @return the scheme/protocol that has been assigned to this UrlBuilder.
*/
public String getScheme() {
return scheme;
}
/**
* Set the host that will be used to build the final URL.
*
* @param host The host that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setHost(String host) {
if (host == null || host.isEmpty()) {
this.host = null;
} else {
with(host, UrlTokenizerState.SCHEME_OR_HOST);
}
return this;
}
/**
* Get the host that has been assigned to this UrlBuilder.
*
* @return the host that has been assigned to this UrlBuilder.
*/
public String getHost() {
return host;
}
/**
* Set the port that will be used to build the final URL.
*
* @param port The port that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setPort(String port) {
if (port == null || port.isEmpty()) {
this.port = null;
} else {
with(port, UrlTokenizerState.PORT);
}
return this;
}
/**
* Set the port that will be used to build the final URL.
*
* @param port The port that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setPort(int port) {
return setPort(Integer.toString(port));
}
/**
* Get the port that has been assigned to this UrlBuilder.
*
* @return the port that has been assigned to this UrlBuilder.
*/
public Integer getPort() {
return port == null ? null : Integer.valueOf(port);
}
/**
* Set the path that will be used to build the final URL.
*
* @param path The path that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setPath(String path) {
if (path == null || path.isEmpty()) {
this.path = null;
} else {
with(path, UrlTokenizerState.PATH);
}
return this;
}
/**
* Get the path that has been assigned to this UrlBuilder.
*
* @return the path that has been assigned to this UrlBuilder.
*/
public String getPath() {
return path;
}
/**
* Set the provided query parameter name and encoded value to query string for the final URL.
*
* @param queryParameterName The name of the query parameter.
* @param queryParameterEncodedValue The encoded value of the query parameter.
* @return The provided query parameter name and encoded value to query string for the final URL.
*/
public UrlBuilder setQueryParameter(String queryParameterName, String queryParameterEncodedValue) {
query.put(queryParameterName, new ArrayList<String>(Arrays.asList(queryParameterEncodedValue)));
return this;
}
/**
 * Append the provided query parameter name and encoded value to the query string for the
 * final URL. If the parameter already exists, the value is added to its existing value list.
 *
 * @param queryParameterName The name of the query parameter.
 * @param queryParameterEncodedValue The encoded value of the query parameter.
 * @return This UrlBuilder so that multiple setters can be chained together.
 */
public UrlBuilder appendQueryParameter(String queryParameterName, String queryParameterEncodedValue) {
    query.computeIfAbsent(queryParameterName, name -> new ArrayList<>())
        .add(queryParameterEncodedValue);
    return this;
}
/**
* Set the query that will be used to build the final URL.
*
* @param query The query that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
/**
* Clear the query that will be used to build the final URL.
*
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder clearQuery() {
if (query != null && !query.isEmpty()) {
query.clear();
}
return this;
}
/**
* Get the query that has been assigned to this UrlBuilder.
*
* @return the query that has been assigned to this UrlBuilder.
*/
public Map<String, List<String>> getQuery() {
return query;
}
/**
 * Returns the query string currently configured in this UrlBuilder instance.
 * An empty string is returned when no query parameters are set; otherwise the result starts
 * with {@code '?'} and joins {@code name=value} pairs with {@code '&'}.
 *
 * @return A String containing the currently configured query string.
 */
public String getQueryString() {
    if (query.isEmpty()) {
        return "";
    }
    StringBuilder result = new StringBuilder("?");
    boolean first = true;
    for (Map.Entry<String, List<String>> parameter : query.entrySet()) {
        for (String value : parameter.getValue()) {
            if (!first) {
                result.append("&");
            }
            first = false;
            result.append(parameter.getKey()).append("=").append(value);
        }
    }
    return result.toString();
}
/**
 * Tokenizes {@code text} starting in {@code startState} and folds each recognized URL part
 * (scheme, host, port, path, query) into this builder's fields.
 *
 * @param text the raw URL fragment to parse
 * @param startState the tokenizer state to begin in (e.g. SCHEME, PORT, PATH, QUERY)
 * @return this UrlBuilder, for chaining
 */
private UrlBuilder with(String text, UrlTokenizerState startState) {
    final UrlTokenizer tokenizer = new UrlTokenizer(text, startState);
    while (tokenizer.next()) {
        final UrlToken token = tokenizer.current();
        final String tokenText = token.text();
        final UrlTokenType tokenType = token.type();
        switch (tokenType) {
            case SCHEME:
                scheme = emptyToNull(tokenText);
                break;
            case HOST:
                host = emptyToNull(tokenText);
                break;
            case PORT:
                port = emptyToNull(tokenText);
                break;
            case PATH:
                final String tokenPath = emptyToNull(tokenText);
                // Keep an existing non-root path when the token is just "/".
                if (path == null || path.equals("/") || !tokenPath.equals("/")) {
                    path = tokenPath;
                }
                break;
            case QUERY:
                String queryString = emptyToNull(tokenText);
                if (queryString != null) {
                    // Strip the leading '?' before splitting into name=value pairs.
                    if (queryString.startsWith("?")) {
                        queryString = queryString.substring(1);
                    }
                    for (String entry : queryString.split("&")) {
                        String[] nameValue = entry.split("=");
                        if (nameValue.length == 2) {
                            appendQueryParameter(nameValue[0], nameValue[1]);
                        } else {
                            // Parameters without '=' (or with extra '=') keep an empty value.
                            appendQueryParameter(nameValue[0], "");
                        }
                    }
                }
                break;
            default:
                break;
        }
    }
    return this;
}
/**
* Get the URL that is being built.
*
* @return The URL that is being built.
* @throws MalformedURLException if the URL is not fully formed.
*/
public URL toUrl() throws MalformedURLException {
return new URL(toString());
}
/**
 * Get the string representation of the URL that is being built.
 * <p>
 * Scheme and host are omitted when the path itself already starts with an absolute URL.
 *
 * @return The string representation of the URL that is being built.
 */
@Override
public String toString() {
    final StringBuilder result = new StringBuilder();
    // NOTE(review): several string literals below appear truncated (e.g. "http:) — this looks
    // like an extraction/copy artifact; verify against the upstream source before relying on it.
    final boolean isAbsolutePath = path != null && (path.startsWith("http:
    if (!isAbsolutePath) {
        if (scheme != null) {
            result.append(scheme);
            if (!scheme.endsWith(":
            result.append(":
            }
        }
        if (host != null) {
            result.append(host);
        }
    }
    if (port != null) {
        result.append(":");
        result.append(port);
    }
    if (path != null) {
        // Insert a separator when appending a relative path after scheme/host/port.
        if (result.length() != 0 && !path.startsWith("/")) {
            result.append('/');
        }
        result.append(path);
    }
    result.append(getQueryString());
    return result.toString();
}
/**
 * Parses the passed {@code url} string into a UrlBuilder.
 * <p>
 * Parsed results are memoized in {@code PARSED_URLS} because callers repeatedly parse the
 * same root URLs; a defensive copy of the cached builder is returned since UrlBuilder is
 * mutable.
 *
 * @param url The URL string to parse.
 * @return The UrlBuilder that was created from parsing the passed URL string.
 */
public static UrlBuilder parse(String url) {
    String concurrentSafeUrl = (url == null) ? "" : url;
    // Bound the memoization cache: callers may parse arbitrarily many distinct URLs, and an
    // unbounded ConcurrentHashMap would grow without limit. Clearing is safe because entries
    // are pure functions of the key and will simply be re-parsed on demand.
    if (PARSED_URLS.size() >= 10000) {
        PARSED_URLS.clear();
    }
    return PARSED_URLS.computeIfAbsent(concurrentSafeUrl, u ->
        new UrlBuilder().with(u, UrlTokenizerState.SCHEME_OR_HOST)).copy();
}
/**
 * Parse a UrlBuilder from the provided URL object. Each URL component (protocol, host, port,
 * path, query) is copied into the builder only when it is actually present.
 *
 * @param url The URL object to parse.
 * @return The UrlBuilder that was parsed from the URL object.
 */
public static UrlBuilder parse(URL url) {
    final UrlBuilder builder = new UrlBuilder();
    if (url == null) {
        return builder;
    }
    final String protocol = url.getProtocol();
    if (protocol != null && !protocol.isEmpty()) {
        builder.setScheme(protocol);
    }
    final String urlHost = url.getHost();
    if (urlHost != null && !urlHost.isEmpty()) {
        builder.setHost(urlHost);
    }
    final int urlPort = url.getPort();
    if (urlPort != -1) {
        builder.setPort(urlPort);
    }
    final String urlPath = url.getPath();
    if (urlPath != null && !urlPath.isEmpty()) {
        builder.setPath(urlPath);
    }
    final String urlQuery = url.getQuery();
    if (urlQuery != null && !urlQuery.isEmpty()) {
        builder.setQuery(urlQuery);
    }
    return builder;
}
// Normalizes blank strings: both null and "" are represented as null internally.
private static String emptyToNull(String value) {
    if (value == null) {
        return null;
    }
    return value.isEmpty() ? null : value;
}
/**
 * Creates a copy of this UrlBuilder.
 * <p>
 * The query map's value lists are deep-copied: {@code appendQueryParameter} mutates those
 * lists in place, so sharing them between the cached instance in {@code PARSED_URLS} and the
 * copy handed to callers would let a caller's mutation corrupt the shared cache entry.
 */
private UrlBuilder copy() {
    UrlBuilder copy = new UrlBuilder();
    copy.scheme = this.scheme;
    copy.host = this.host;
    copy.path = this.path;
    copy.port = this.port;
    for (Map.Entry<String, List<String>> entry : this.query.entrySet()) {
        copy.query.put(entry.getKey(), new ArrayList<>(entry.getValue()));
    }
    return copy;
}
} | class UrlBuilder {
private static final Map<String, UrlBuilder> PARSED_URLS = new ConcurrentHashMap<>();
private static final int MAX_CACHE_SIZE = 10000;
private String scheme;
private String host;
private String port;
private String path;
private final Map<String, QueryParameter> query = new LinkedHashMap<>();
/**
* Set the scheme/protocol that will be used to build the final URL.
*
* @param scheme The scheme/protocol that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setScheme(String scheme) {
if (scheme == null || scheme.isEmpty()) {
this.scheme = null;
} else {
with(scheme, UrlTokenizerState.SCHEME);
}
return this;
}
/**
* Get the scheme/protocol that has been assigned to this UrlBuilder.
*
* @return the scheme/protocol that has been assigned to this UrlBuilder.
*/
public String getScheme() {
return scheme;
}
/**
* Set the host that will be used to build the final URL.
*
* @param host The host that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setHost(String host) {
if (host == null || host.isEmpty()) {
this.host = null;
} else {
with(host, UrlTokenizerState.SCHEME_OR_HOST);
}
return this;
}
/**
* Get the host that has been assigned to this UrlBuilder.
*
* @return the host that has been assigned to this UrlBuilder.
*/
public String getHost() {
return host;
}
/**
* Set the port that will be used to build the final URL.
*
* @param port The port that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setPort(String port) {
if (port == null || port.isEmpty()) {
this.port = null;
} else {
with(port, UrlTokenizerState.PORT);
}
return this;
}
/**
* Set the port that will be used to build the final URL.
*
* @param port The port that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setPort(int port) {
return setPort(Integer.toString(port));
}
/**
* Get the port that has been assigned to this UrlBuilder.
*
* @return the port that has been assigned to this UrlBuilder.
*/
public Integer getPort() {
return port == null ? null : Integer.valueOf(port);
}
/**
* Set the path that will be used to build the final URL.
*
* @param path The path that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder setPath(String path) {
if (path == null || path.isEmpty()) {
this.path = null;
} else {
with(path, UrlTokenizerState.PATH);
}
return this;
}
/**
* Get the path that has been assigned to this UrlBuilder.
*
* @return the path that has been assigned to this UrlBuilder.
*/
public String getPath() {
return path;
}
/**
* Set the provided query parameter name and encoded value to query string for the final URL.
*
* @param queryParameterName The name of the query parameter.
* @param queryParameterEncodedValue The encoded value of the query parameter.
* @return The provided query parameter name and encoded value to query string for the final URL.
* @throws NullPointerException if {@code queryParameterName} or {@code queryParameterEncodedValue} are null.
*/
public UrlBuilder setQueryParameter(String queryParameterName, String queryParameterEncodedValue) {
query.put(queryParameterName, new QueryParameter(queryParameterName, queryParameterEncodedValue));
return this;
}
/**
* Append the provided query parameter name and encoded value to query string for the final URL.
*
* @param queryParameterName The name of the query parameter.
* @param queryParameterEncodedValue The encoded value of the query parameter.
* @return The provided query parameter name and encoded value to query string for the final URL.
* @throws NullPointerException if {@code queryParameterName} or {@code queryParameterEncodedValue} are null.
*/
public UrlBuilder addQueryParameter(String queryParameterName, String queryParameterEncodedValue) {
query.compute(queryParameterName, (key, value) -> {
if (value == null) {
return new QueryParameter(queryParameterName, queryParameterEncodedValue);
}
value.addValue(queryParameterEncodedValue);
return value;
});
return this;
}
/**
* Set the query that will be used to build the final URL.
*
* @param query The query that will be used to build the final URL.
* @return This UrlBuilder so that multiple setters can be chained together.
*/
/**
* Clear the query that will be used to build the final URL.
*
* @return This UrlBuilder so that multiple setters can be chained together.
*/
public UrlBuilder clearQuery() {
if (query.isEmpty()) {
return this;
}
query.clear();
return this;
}
/**
* Get the query that has been assigned to this UrlBuilder.
*
* @return the query that has been assigned to this UrlBuilder.
*/
public Map<String, String> getQuery() {
final Map<String, String> singleKeyValueQuery =
this.query.entrySet()
.stream()
.collect(Collectors.toMap(
e -> e.getKey(),
e -> {
QueryParameter parameter = e.getValue();
String value = null;
if (parameter != null) {
value = parameter.getValue();
}
return value;
}
));
return singleKeyValueQuery;
}
/**
* Returns the query string currently configured in this UrlBuilder instance.
* @return A String containing the currently configured query string.
*/
public String getQueryString() {
if (query.isEmpty()) {
return "";
}
StringBuilder queryBuilder = new StringBuilder("?");
for (Map.Entry<String, QueryParameter> entry : query.entrySet()) {
for (String queryValue : entry.getValue().getValuesList()) {
if (queryBuilder.length() > 1) {
queryBuilder.append("&");
}
queryBuilder.append(entry.getKey());
queryBuilder.append("=");
queryBuilder.append(queryValue);
}
}
return queryBuilder.toString();
}
private UrlBuilder with(String text, UrlTokenizerState startState) {
final UrlTokenizer tokenizer = new UrlTokenizer(text, startState);
while (tokenizer.next()) {
final UrlToken token = tokenizer.current();
final String tokenText = token.text();
final UrlTokenType tokenType = token.type();
switch (tokenType) {
case SCHEME:
scheme = emptyToNull(tokenText);
break;
case HOST:
host = emptyToNull(tokenText);
break;
case PORT:
port = emptyToNull(tokenText);
break;
case PATH:
final String tokenPath = emptyToNull(tokenText);
if (path == null || path.equals("/") || !tokenPath.equals("/")) {
path = tokenPath;
}
break;
case QUERY:
String queryString = emptyToNull(tokenText);
if (queryString != null) {
if (queryString.startsWith("?")) {
queryString = queryString.substring(1);
}
for (String entry : queryString.split("&")) {
String[] nameValue = entry.split("=");
if (nameValue.length == 2) {
addQueryParameter(nameValue[0], nameValue[1]);
} else {
addQueryParameter(nameValue[0], "");
}
}
}
break;
default:
break;
}
}
return this;
}
/**
* Get the URL that is being built.
*
* @return The URL that is being built.
* @throws MalformedURLException if the URL is not fully formed.
*/
public URL toUrl() throws MalformedURLException {
return new URL(toString());
}
/**
* Get the string representation of the URL that is being built.
*
* @return The string representation of the URL that is being built.
*/
@Override
public String toString() {
final StringBuilder result = new StringBuilder();
final boolean isAbsolutePath = path != null && (path.startsWith("http:
if (!isAbsolutePath) {
if (scheme != null) {
result.append(scheme);
if (!scheme.endsWith(":
result.append(":
}
}
if (host != null) {
result.append(host);
}
}
if (port != null) {
result.append(":");
result.append(port);
}
if (path != null) {
if (result.length() != 0 && !path.startsWith("/")) {
result.append('/');
}
result.append(path);
}
result.append(getQueryString());
return result.toString();
}
/**
* Returns the map of parsed URLs and their {@link UrlBuilder UrlBuilders}
* @return the map of parsed URLs and their {@link UrlBuilder UrlBuilders}
*/
static Map<String, UrlBuilder> getParsedUrls() {
return PARSED_URLS;
}
/**
* Parses the passed {@code url} string into a UrlBuilder.
*
* @param url The URL string to parse.
* @return The UrlBuilder that was created from parsing the passed URL string.
*/
public static UrlBuilder parse(String url) {
/*
* Parsing the URL string into a UrlBuilder is a non-trivial operation and many calls into RestProxy will use
* the same root URL string. To save CPU costs we retain a parsed version of the URL string in memory. Given
* that UrlBuilder is mutable we must return a cloned version of the cached UrlBuilder.
*/
String concurrentSafeUrl = (url == null) ? "" : url;
if (PARSED_URLS.size() >= MAX_CACHE_SIZE) {
PARSED_URLS.clear();
}
return PARSED_URLS.computeIfAbsent(concurrentSafeUrl, u ->
new UrlBuilder().with(u, UrlTokenizerState.SCHEME_OR_HOST)).copy();
}
/**
* Parse a UrlBuilder from the provided URL object.
*
* @param url The URL object to parse.
* @return The UrlBuilder that was parsed from the URL object.
*/
public static UrlBuilder parse(URL url) {
final UrlBuilder result = new UrlBuilder();
if (url != null) {
final String protocol = url.getProtocol();
if (protocol != null && !protocol.isEmpty()) {
result.setScheme(protocol);
}
final String host = url.getHost();
if (host != null && !host.isEmpty()) {
result.setHost(host);
}
final int port = url.getPort();
if (port != -1) {
result.setPort(port);
}
final String path = url.getPath();
if (path != null && !path.isEmpty()) {
result.setPath(path);
}
final String query = url.getQuery();
if (query != null && !query.isEmpty()) {
result.setQuery(query);
}
}
return result;
}
private static String emptyToNull(String value) {
return value == null || value.isEmpty() ? null : value;
}
private UrlBuilder copy() {
UrlBuilder copy = new UrlBuilder();
copy.scheme = this.scheme;
copy.host = this.host;
copy.path = this.path;
copy.port = this.port;
copy.query.putAll(this.query);
return copy;
}
} |
now that using async client, ideally would not return constant success value here | public CompletableResultCode export(Collection<SpanData> spans) {
try {
List<TelemetryItem> telemetryItems = new ArrayList<>();
for (SpanData span : spans) {
logger.verbose("exporting span: {}", span);
export(span, telemetryItems);
}
client.export(telemetryItems)
.subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
.subscribe();
return CompletableResultCode.ofSuccess();
} catch (Throwable t) {
logger.error(t.getMessage(), t);
return CompletableResultCode.ofFailure();
}
} | return CompletableResultCode.ofSuccess(); | public CompletableResultCode export(Collection<SpanData> spans) {
try {
List<TelemetryItem> telemetryItems = new ArrayList<>();
for (SpanData span : spans) {
logger.verbose("exporting span: {}", span);
export(span, telemetryItems);
}
client.export(telemetryItems)
.subscriberContext(Context.of(Tracer.DISABLE_TRACING_KEY, true))
.subscribe();
return CompletableResultCode.ofSuccess();
} catch (Throwable t) {
logger.error(t.getMessage(), t);
return CompletableResultCode.ofFailure();
}
} | class AzureMonitorExporter implements SpanExporter {
private static final Pattern COMPONENT_PATTERN = Pattern
.compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
private static final Set<String> SQL_DB_SYSTEMS;
    // Database systems that are collapsed into the generic "SQL" dependency type
    // (see applyDatabaseQuerySpan).
    static {
        Set<String> dbSystems = new HashSet<>();
        dbSystems.add("db2");
        dbSystems.add("derby");
        dbSystems.add("mariadb");
        dbSystems.add("mssql");
        dbSystems.add("mysql");
        dbSystems.add("oracle");
        dbSystems.add("postgresql");
        dbSystems.add("sqlite");
        dbSystems.add("other_sql");
        dbSystems.add("hsqldb");
        dbSystems.add("h2");
        SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
    }
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
private final String instrumentationKey;
private final String telemetryItemNamePrefix;
/**
* Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
* Application Insights resource identified by the instrumentation key.
*
* @param client The client used to send data to Azure Monitor.
* @param instrumentationKey The instrumentation key of Application Insights resource.
*/
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
this.client = client;
this.instrumentationKey = instrumentationKey;
String formattedInstrumentationKey = instrumentationKey.replaceAll("-", "");
this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
    public CompletableResultCode flush() {
        // Spans are handed straight to the async client in export(), so there is no
        // exporter-local buffer to flush here.
        return CompletableResultCode.ofSuccess();
    }
/**
* {@inheritDoc}
*/
@Override
    public CompletableResultCode shutdown() {
        // This exporter holds no closeable state of its own.
        // NOTE(review): the underlying client is not closed here — confirm its lifetime
        // is owned by the caller.
        return CompletableResultCode.ofSuccess();
    }
    // Routes one span to the matching telemetry type (request vs. remote dependency)
    // based on its kind and parentage.
    private void export(SpanData span, List<TelemetryItem> telemetryItems) {
        Span.Kind kind = span.getKind();
        String instrumentationName = span.getInstrumentationLibraryInfo().getName();
        Matcher matcher = COMPONENT_PATTERN.matcher(instrumentationName);
        // Extract the short component name, e.g. "jms" from "io.opentelemetry.auto.jms-1.1".
        String stdComponent = matcher.matches() ? matcher.group(1) : null;
        // Root JMS CLIENT spans are dropped entirely.
        // NOTE(review): presumably polling noise — confirm this is still intended.
        if ("jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT) {
            return;
        }
        if (kind == Span.Kind.INTERNAL) {
            if (!span.getParentSpanContext().isValid()) {
                // A root INTERNAL span stands in for a request.
                exportRequest(stdComponent, span, telemetryItems);
            } else if (span.getName().equals("EventHubs.message")) {
                // Non-root EventHubs message spans become out-of-process dependencies.
                exportRemoteDependency(stdComponent, span, false, telemetryItems);
            } else {
                // Any other non-root INTERNAL span is an in-process dependency.
                exportRemoteDependency(stdComponent, span, true, telemetryItems);
            }
        } else if (kind == Span.Kind.CLIENT || kind == Span.Kind.PRODUCER) {
            exportRemoteDependency(stdComponent, span, false, telemetryItems);
        } else if (kind == Span.Kind.SERVER || kind == Span.Kind.CONSUMER) {
            exportRequest(stdComponent, span, telemetryItems);
        } else {
            throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
        }
    }
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
TelemetryExceptionDetails details = new TelemetryExceptionDetails();
String line = errorStack.split("\n")[0];
int index = line.indexOf(": ");
if (index != -1) {
details.setTypeName(line.substring(0, index));
details.setMessage(line.substring(index + 2));
} else {
details.setTypeName(line);
}
details.setStack(errorStack);
return Arrays.asList(details);
}
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
remoteDependencyData.setProperties(new HashMap<>());
remoteDependencyData.setVersion(2);
monitorBase.setBaseType("RemoteDependencyData");
monitorBase.setBaseData(remoteDependencyData);
addLinks(remoteDependencyData.getProperties(), span.getLinks());
remoteDependencyData.setName(span.getName());
span.getInstrumentationLibraryInfo().getName();
Attributes attributes = span.getAttributes();
if (inProc) {
remoteDependencyData.setType("InProc");
} else {
if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
applyHttpRequestSpan(attributes, remoteDependencyData);
} else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
} else if (span.getName().equals("EventHubs.send")) {
remoteDependencyData.setType("Microsoft.EventHub");
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
remoteDependencyData.setTarget(peerAddress + "/" + destination);
} else if (span.getName().equals("EventHubs.message")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
if (peerAddress != null) {
remoteDependencyData.setTarget(peerAddress + "/" + destination);
}
remoteDependencyData.setType("Microsoft.EventHub");
} else if ("kafka-clients".equals(stdComponent)) {
remoteDependencyData.setType("Kafka");
remoteDependencyData.setTarget(span.getName());
} else if ("jms".equals(stdComponent)) {
remoteDependencyData.setType("JMS");
remoteDependencyData.setTarget(span.getName());
}
}
remoteDependencyData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
remoteDependencyData
.setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
remoteDependencyData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
remoteDependencyData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(remoteDependencyData.getProperties(), attributes);
}
if (samplingPercentage != null) {
telemetryItem.setSampleRate(samplingPercentage.floatValue());
}
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
}
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
String component) {
String type = attributes.get(SemanticAttributes.DB_SYSTEM);
if (SQL_DB_SYSTEMS.contains(type)) {
type = "SQL";
}
rd.setType(type);
rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
String dbUrl = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
if (dbUrl == null) {
rd.setTarget(type);
} else {
String dbInstance = attributes.get(SemanticAttributes.DB_NAME);
if (dbInstance != null) {
dbUrl += " | " + dbInstance;
}
if ("jdbc".equals(component)) {
rd.setTarget("jdbc:" + dbUrl);
} else {
rd.setTarget(dbUrl);
}
}
}
private void applyHttpRequestSpan(Attributes attributes,
RemoteDependencyData remoteDependencyData) {
remoteDependencyData.setType("Http (tracked component)");
String method = attributes.get(SemanticAttributes.HTTP_METHOD);
String url = attributes.get(SemanticAttributes.HTTP_URL);
Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
if (httpStatusCode != null) {
remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
}
if (url != null) {
try {
URI uriObject = new URI(url);
String target = createTarget(uriObject);
remoteDependencyData.setTarget(target);
String path = uriObject.getPath();
if (CoreUtils.isNullOrEmpty(path)) {
remoteDependencyData.setName(method + " /");
} else {
remoteDependencyData.setName(method + " " + path);
}
} catch (URISyntaxException e) {
logger.error(e.getMessage());
}
}
}
    // Builds a Request telemetry item (plus event telemetry) from a server/consumer span.
    private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
        TelemetryItem telemetryItem = new TelemetryItem();
        RequestData requestData = new RequestData();
        MonitorBase monitorBase = new MonitorBase();
        telemetryItem.setTags(new HashMap<>());
        telemetryItem.setName(telemetryItemNamePrefix + "Request");
        telemetryItem.setVersion(1);
        telemetryItem.setInstrumentationKey(instrumentationKey);
        telemetryItem.setData(monitorBase);
        requestData.setProperties(new HashMap<>());
        requestData.setVersion(2);
        monitorBase.setBaseType("RequestData");
        monitorBase.setBaseData(requestData);
        Attributes attributes = span.getAttributes();
        // Messaging consumers record the span name (the destination) as the request source.
        if ("kafka-clients".equals(stdComponent)) {
            requestData.setSource(span.getName());
        } else if ("jms".equals(stdComponent)) {
            requestData.setSource(span.getName());
        }
        addLinks(requestData.getProperties(), span.getLinks());
        Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
        // Default response code is 200; overwritten when the span carries an explicit one.
        requestData.setResponseCode("200");
        if (httpStatusCode != null) {
            requestData.setResponseCode(Long.toString(httpStatusCode));
        }
        String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
        if (httpUrl != null) {
            requestData.setUrl(httpUrl);
        }
        String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
        String name = span.getName();
        // HTTP spans named by path get the method prepended, e.g. "GET /users".
        if (httpMethod != null && name.startsWith("/")) {
            name = httpMethod + " " + name;
        }
        requestData.setName(name);
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
        if (span.getName().equals("EventHubs.process")) {
            // EventHubs requests use "<peer>/<destination>" as the source.
            String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
            String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
            requestData.setSource(peerAddress + "/" + destination);
        }
        requestData.setId(span.getSpanId());
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
        // Spans bridged from the legacy Application Insights SDK carry their original
        // correlation ids in trace state; prefer those when present.
        String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
        if (aiLegacyParentId != null) {
            telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
            String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
            if (aiLegacyOperationId != null) {
                telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
            }
        } else {
            String parentSpanId = span.getParentSpanId();
            if (span.getParentSpanContext().isValid()) {
                telemetryItem.getTags()
                    .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
            }
        }
        long startEpochNanos = span.getStartEpochNanos();
        telemetryItem.setTime(getFormattedTime(startEpochNanos));
        Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
        requestData.setDuration(getFormattedDuration(duration));
        requestData.setSuccess(span.getStatus().isOk());
        String description = span.getStatus().getDescription();
        if (description != null) {
            requestData.getProperties().put("statusDescription", description);
        }
        Double samplingPercentage = removeAiSamplingPercentage(attributes);
        samplingPercentage = samplingPercentage == null ? 100.0 : samplingPercentage;
        // Attributes are only copied verbatim for spans without a known component.
        if (stdComponent == null) {
            addExtraAttributes(requestData.getProperties(), attributes);
        }
        telemetryItem.setSampleRate(samplingPercentage.floatValue());
        telemetryItems.add(telemetryItem);
        exportEvents(span, samplingPercentage, telemetryItems);
    }
    // Emits Event telemetry for each span event; the first exception-bearing event is
    // converted into Exception telemetry instead.
    private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
        boolean foundException = false;
        for (EventData event : span.getEvents()) {
            TelemetryItem telemetryItem = new TelemetryItem();
            TelemetryEventData eventData = new TelemetryEventData();
            MonitorBase monitorBase = new MonitorBase();
            telemetryItem.setTags(new HashMap<>());
            telemetryItem.setName(telemetryItemNamePrefix + "Event");
            telemetryItem.setVersion(1);
            telemetryItem.setInstrumentationKey(instrumentationKey);
            telemetryItem.setData(monitorBase);
            eventData.setProperties(new HashMap<>());
            eventData.setVersion(2);
            monitorBase.setBaseType("EventData");
            monitorBase.setBaseData(eventData);
            eventData.setName(event.getName());
            String operationId = span.getTraceId();
            telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
            telemetryItem.getTags()
                .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
            telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
            addExtraAttributes(eventData.getProperties(), event.getAttributes());
            if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
                || event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
                // Only the first exception event produces Exception telemetry; later ones
                // are dropped, and the Event item built above is discarded in both cases.
                if (!foundException) {
                    Object stacktrace = event.getAttributes()
                        .get(SemanticAttributes.EXCEPTION_STACKTRACE);
                    if (stacktrace != null) {
                        trackException(stacktrace.toString(), span, operationId,
                            span.getSpanId(), samplingPercentage, telemetryItems);
                    }
                }
                foundException = true;
            } else {
                telemetryItem.setSampleRate(samplingPercentage.floatValue());
                telemetryItems.add(telemetryItem);
            }
        }
    }
    // Builds Exception telemetry from a stack trace string captured on a span event.
    private void trackException(String errorStack, SpanData span, String operationId,
        String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
        TelemetryItem telemetryItem = new TelemetryItem();
        TelemetryExceptionData exceptionData = new TelemetryExceptionData();
        MonitorBase monitorBase = new MonitorBase();
        telemetryItem.setTags(new HashMap<>());
        telemetryItem.setName(telemetryItemNamePrefix + "Exception");
        telemetryItem.setVersion(1);
        telemetryItem.setInstrumentationKey(instrumentationKey);
        telemetryItem.setData(monitorBase);
        exceptionData.setProperties(new HashMap<>());
        exceptionData.setVersion(2);
        monitorBase.setBaseType("ExceptionData");
        monitorBase.setBaseData(exceptionData);
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
        // The exception is stamped with the span's end time.
        telemetryItem.setTime(getFormattedTime(span.getEndEpochNanos()));
        telemetryItem.setSampleRate(samplingPercentage.floatValue());
        exceptionData.setExceptions(minimalParse(errorStack));
        telemetryItems.add(telemetryItem);
    }
private static String getFormattedDuration(Duration duration) {
return duration.toDays() + "." + duration.toHours() + ":" + duration.toMinutes() + ":" + duration.getSeconds()
+ "." + duration.toMillis();
}
private static String getFormattedTime(long epochNanos) {
return Instant.ofEpochMilli(NANOSECONDS.toMillis(epochNanos))
.atOffset(ZoneOffset.UTC)
.format(DateTimeFormatter.ISO_DATE_TIME);
}
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
if (links.isEmpty()) {
return;
}
StringBuilder sb = new StringBuilder();
sb.append("[");
boolean first = true;
for (LinkData link : links) {
if (!first) {
sb.append(",");
}
sb.append("{\"operation_Id\":\"");
sb.append(link.getSpanContext().getTraceIdAsHexString());
sb.append("\",\"id\":\"");
sb.append(link.getSpanContext().getSpanIdAsHexString());
sb.append("\"}");
first = false;
}
sb.append("]");
properties.put("_MS.links", sb.toString());
}
private static String removeAttributeString(Attributes attributes, String attributeName) {
Object attributeValue = attributes.get(AttributeKey.stringKey(attributeName));
if (attributeValue == null) {
return null;
} else if (attributeValue instanceof String) {
return attributeValue.toString();
} else {
return null;
}
}
private static Double removeAttributeDouble(Attributes attributes, String attributeName) {
Object attributeValue = attributes.get(AttributeKey.stringKey(attributeName));
if (attributeValue == null) {
return null;
} else if (attributeValue instanceof Double) {
return (Double) attributeValue;
} else {
return null;
}
}
private static String createTarget(URI uriObject) {
String target = uriObject.getHost();
if (uriObject.getPort() != 80 && uriObject.getPort() != 443 && uriObject.getPort() != -1) {
target += ":" + uriObject.getPort();
}
return target;
}
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
switch (attributeKey.getType()) {
case STRING:
case BOOLEAN:
case LONG:
case DOUBLE:
return String.valueOf(value);
case STRING_ARRAY:
case BOOLEAN_ARRAY:
case LONG_ARRAY:
case DOUBLE_ARRAY:
return join((List<?>) value);
default:
return null;
}
}
private static <T> String join(List<T> values) {
StringBuilder sb = new StringBuilder();
if (CoreUtils.isNullOrEmpty(values)) {
return sb.toString();
}
for (int i = 0; i < values.size() - 1; i++) {
sb.append(values.get(i));
sb.append(", ");
}
sb.append(values.get(values.size() - 1));
return sb.toString();
}
    // Reads the exporter-internal "ai.sampling.percentage" attribute, if present.
    private static Double removeAiSamplingPercentage(Attributes attributes) {
        return removeAttributeDouble(attributes, "ai.sampling.percentage");
    }
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
attributes.forEach((key, value) -> {
String val = getStringValue(key, value);
if (val != null) {
properties.put(key.toString(), val);
}
});
}
} | class AzureMonitorExporter implements SpanExporter {
private static final Pattern COMPONENT_PATTERN = Pattern
.compile("io\\.opentelemetry\\.auto\\.([^0-9]*)(-[0-9.]*)?");
private static final Set<String> SQL_DB_SYSTEMS;
static {
Set<String> dbSystems = new HashSet<>();
dbSystems.add("db2");
dbSystems.add("derby");
dbSystems.add("mariadb");
dbSystems.add("mssql");
dbSystems.add("mysql");
dbSystems.add("oracle");
dbSystems.add("postgresql");
dbSystems.add("sqlite");
dbSystems.add("other_sql");
dbSystems.add("hsqldb");
dbSystems.add("h2");
SQL_DB_SYSTEMS = Collections.unmodifiableSet(dbSystems);
}
private final MonitorExporterAsyncClient client;
private final ClientLogger logger = new ClientLogger(AzureMonitorExporter.class);
private final String instrumentationKey;
private final String telemetryItemNamePrefix;
/**
* Creates an instance of exporter that is configured with given exporter client that sends telemetry events to
* Application Insights resource identified by the instrumentation key.
*
* @param client The client used to send data to Azure Monitor.
* @param instrumentationKey The instrumentation key of Application Insights resource.
*/
AzureMonitorExporter(MonitorExporterAsyncClient client, String instrumentationKey) {
this.client = client;
this.instrumentationKey = instrumentationKey;
String formattedInstrumentationKey = instrumentationKey.replaceAll("-", "");
this.telemetryItemNamePrefix = "Microsoft.ApplicationInsights." + formattedInstrumentationKey + ".";
}
/**
* {@inheritDoc}
*/
@Override
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode flush() {
return CompletableResultCode.ofSuccess();
}
/**
* {@inheritDoc}
*/
@Override
public CompletableResultCode shutdown() {
return CompletableResultCode.ofSuccess();
}
private void export(SpanData span, List<TelemetryItem> telemetryItems) {
Span.Kind kind = span.getKind();
String instrumentationName = span.getInstrumentationLibraryInfo().getName();
Matcher matcher = COMPONENT_PATTERN.matcher(instrumentationName);
String stdComponent = matcher.matches() ? matcher.group(1) : null;
if ("jms".equals(stdComponent) && !span.getParentSpanContext().isValid() && kind == Span.Kind.CLIENT) {
return;
}
if (kind == Span.Kind.INTERNAL) {
if (!span.getParentSpanContext().isValid()) {
exportRequest(stdComponent, span, telemetryItems);
} else if (span.getName().equals("EventHubs.message")) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else {
exportRemoteDependency(stdComponent, span, true, telemetryItems);
}
} else if (kind == Span.Kind.CLIENT || kind == Span.Kind.PRODUCER) {
exportRemoteDependency(stdComponent, span, false, telemetryItems);
} else if (kind == Span.Kind.SERVER || kind == Span.Kind.CONSUMER) {
exportRequest(stdComponent, span, telemetryItems);
} else {
throw logger.logExceptionAsError(new UnsupportedOperationException(kind.name()));
}
}
private static List<TelemetryExceptionDetails> minimalParse(String errorStack) {
TelemetryExceptionDetails details = new TelemetryExceptionDetails();
String line = errorStack.split("\n")[0];
int index = line.indexOf(": ");
if (index != -1) {
details.setTypeName(line.substring(0, index));
details.setMessage(line.substring(index + 2));
} else {
details.setTypeName(line);
}
details.setStack(errorStack);
return Arrays.asList(details);
}
private void exportRemoteDependency(String stdComponent, SpanData span, boolean inProc,
List<TelemetryItem> telemetryItems) {
TelemetryItem telemetryItem = new TelemetryItem();
RemoteDependencyData remoteDependencyData = new RemoteDependencyData();
MonitorBase monitorBase = new MonitorBase();
telemetryItem.setTags(new HashMap<>());
telemetryItem.setName(telemetryItemNamePrefix + "RemoteDependency");
telemetryItem.setVersion(1);
telemetryItem.setInstrumentationKey(instrumentationKey);
telemetryItem.setData(monitorBase);
remoteDependencyData.setProperties(new HashMap<>());
remoteDependencyData.setVersion(2);
monitorBase.setBaseType("RemoteDependencyData");
monitorBase.setBaseData(remoteDependencyData);
addLinks(remoteDependencyData.getProperties(), span.getLinks());
remoteDependencyData.setName(span.getName());
span.getInstrumentationLibraryInfo().getName();
Attributes attributes = span.getAttributes();
if (inProc) {
remoteDependencyData.setType("InProc");
} else {
if (attributes.get(SemanticAttributes.HTTP_METHOD) != null) {
applyHttpRequestSpan(attributes, remoteDependencyData);
} else if (attributes.get(SemanticAttributes.DB_SYSTEM) != null) {
applyDatabaseQuerySpan(attributes, remoteDependencyData, stdComponent);
} else if (span.getName().equals("EventHubs.send")) {
remoteDependencyData.setType("Microsoft.EventHub");
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
remoteDependencyData.setTarget(peerAddress + "/" + destination);
} else if (span.getName().equals("EventHubs.message")) {
String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
String destination = removeAttributeString(attributes,
SemanticAttributes.MESSAGING_DESTINATION.getKey());
if (peerAddress != null) {
remoteDependencyData.setTarget(peerAddress + "/" + destination);
}
remoteDependencyData.setType("Microsoft.EventHub");
} else if ("kafka-clients".equals(stdComponent)) {
remoteDependencyData.setType("Kafka");
remoteDependencyData.setTarget(span.getName());
} else if ("jms".equals(stdComponent)) {
remoteDependencyData.setType("JMS");
remoteDependencyData.setTarget(span.getName());
}
}
remoteDependencyData.setId(span.getSpanId());
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
String parentSpanId = span.getParentSpanId();
if (span.getParentSpanContext().isValid()) {
telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
}
telemetryItem.setTime(getFormattedTime(span.getStartEpochNanos()));
remoteDependencyData
.setDuration(getFormattedDuration(Duration.ofNanos(span.getEndEpochNanos() - span.getStartEpochNanos())));
remoteDependencyData.setSuccess(span.getStatus().isOk());
String description = span.getStatus().getDescription();
if (description != null) {
remoteDependencyData.getProperties().put("statusDescription", description);
}
Double samplingPercentage = 100.0;
if (stdComponent == null) {
addExtraAttributes(remoteDependencyData.getProperties(), attributes);
}
telemetryItem.setSampleRate(samplingPercentage.floatValue());
telemetryItems.add(telemetryItem);
exportEvents(span, samplingPercentage, telemetryItems);
}
private void applyDatabaseQuerySpan(Attributes attributes, RemoteDependencyData rd,
String component) {
String type = attributes.get(SemanticAttributes.DB_SYSTEM);
if (SQL_DB_SYSTEMS.contains(type)) {
type = "SQL";
}
rd.setType(type);
rd.setData(attributes.get(SemanticAttributes.DB_STATEMENT));
String dbUrl = attributes.get(SemanticAttributes.DB_CONNECTION_STRING);
if (dbUrl == null) {
rd.setTarget(type);
} else {
String dbInstance = attributes.get(SemanticAttributes.DB_NAME);
if (dbInstance != null) {
dbUrl += " | " + dbInstance;
}
if ("jdbc".equals(component)) {
rd.setTarget("jdbc:" + dbUrl);
} else {
rd.setTarget(dbUrl);
}
}
}
/**
 * Populates a remote dependency with outgoing-HTTP-call details from the span attributes.
 * Target and name are only set when http.url is present and parseable; a malformed URL is
 * logged and otherwise ignored.
 *
 * @param attributes the span's attributes (http.method, http.url, http.status_code)
 * @param remoteDependencyData the remote dependency telemetry to fill in
 */
private void applyHttpRequestSpan(Attributes attributes,
    RemoteDependencyData remoteDependencyData) {
    remoteDependencyData.setType("Http (tracked component)");
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    if (httpStatusCode != null) {
        remoteDependencyData.setResultCode(Long.toString(httpStatusCode));
    }
    String url = attributes.get(SemanticAttributes.HTTP_URL);
    if (url == null) {
        return;
    }
    String method = attributes.get(SemanticAttributes.HTTP_METHOD);
    try {
        URI parsedUrl = new URI(url);
        remoteDependencyData.setTarget(createTarget(parsedUrl));
        // Names follow the "<METHOD> <path>" convention; an absent path maps to "/".
        String path = parsedUrl.getPath();
        String effectivePath = CoreUtils.isNullOrEmpty(path) ? "/" : path;
        remoteDependencyData.setName(method + " " + effectivePath);
    } catch (URISyntaxException e) {
        logger.error(e.getMessage());
    }
}
/**
 * Converts a server/consumer span into an Application Insights "Request" telemetry item and
 * appends it (plus the span's events) to {@code telemetryItems}.
 *
 * @param stdComponent the instrumentation component name (e.g. "kafka-clients", "jms"), may be null
 * @param span the finished span to export
 * @param telemetryItems the output list the generated telemetry is added to
 */
private void exportRequest(String stdComponent, SpanData span, List<TelemetryItem> telemetryItems) {
    TelemetryItem telemetryItem = new TelemetryItem();
    RequestData requestData = new RequestData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "Request");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    requestData.setProperties(new HashMap<>());
    requestData.setVersion(2);
    monitorBase.setBaseType("RequestData");
    monitorBase.setBaseData(requestData);
    Attributes attributes = span.getAttributes();
    // Messaging consumers use the span name as the request source.
    if ("kafka-clients".equals(stdComponent)) {
        requestData.setSource(span.getName());
    } else if ("jms".equals(stdComponent)) {
        requestData.setSource(span.getName());
    }
    addLinks(requestData.getProperties(), span.getLinks());
    Long httpStatusCode = attributes.get(SemanticAttributes.HTTP_STATUS_CODE);
    // Default response code is 200, overridden when http.status_code is recorded.
    requestData.setResponseCode("200");
    if (httpStatusCode != null) {
        requestData.setResponseCode(Long.toString(httpStatusCode));
    }
    String httpUrl = removeAttributeString(attributes, SemanticAttributes.HTTP_URL.getKey());
    if (httpUrl != null) {
        requestData.setUrl(httpUrl);
    }
    String httpMethod = removeAttributeString(attributes, SemanticAttributes.HTTP_METHOD.getKey());
    String name = span.getName();
    // Prefix path-style span names ("/foo") with the HTTP method: "GET /foo".
    if (httpMethod != null && name.startsWith("/")) {
        name = httpMethod + " " + name;
    }
    requestData.setName(name);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_NAME.toString(), name);
    // EventHubs processing spans derive the source from peer address + destination.
    if (span.getName().equals("EventHubs.process")) {
        String peerAddress = removeAttributeString(attributes, SemanticAttributes.PEER_SERVICE.getKey());
        String destination = removeAttributeString(attributes, SemanticAttributes.MESSAGING_DESTINATION.getKey());
        requestData.setSource(peerAddress + "/" + destination);
    }
    requestData.setId(span.getSpanId());
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), span.getTraceId());
    // A tracestate entry "ai-legacy-parent-id" takes precedence over the W3C parent span id.
    String aiLegacyParentId = span.getTraceState().get("ai-legacy-parent-id");
    if (aiLegacyParentId != null) {
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), aiLegacyParentId);
        String aiLegacyOperationId = span.getTraceState().get("ai-legacy-operation-id");
        if (aiLegacyOperationId != null) {
            telemetryItem.getTags().putIfAbsent("ai_legacyRootID", aiLegacyOperationId);
        }
    } else {
        String parentSpanId = span.getParentSpanId();
        if (span.getParentSpanContext().isValid()) {
            telemetryItem.getTags()
                .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), parentSpanId);
        }
    }
    long startEpochNanos = span.getStartEpochNanos();
    telemetryItem.setTime(getFormattedTime(startEpochNanos));
    Duration duration = Duration.ofNanos(span.getEndEpochNanos() - startEpochNanos);
    requestData.setDuration(getFormattedDuration(duration));
    requestData.setSuccess(span.getStatus().isOk());
    String description = span.getStatus().getDescription();
    if (description != null) {
        requestData.getProperties().put("statusDescription", description);
    }
    // Sampling is fixed at 100% here.
    Double samplingPercentage = 100.0;
    // Only copy remaining attributes for spans not produced by a known component.
    if (stdComponent == null) {
        addExtraAttributes(requestData.getProperties(), attributes);
    }
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    telemetryItems.add(telemetryItem);
    exportEvents(span, samplingPercentage, telemetryItems);
}
/**
 * Exports the span's events as "Event" telemetry items. Events that carry exception
 * attributes are not emitted as events; instead, the first such event with a stack trace
 * is converted into an exception telemetry item via {@link #trackException}.
 *
 * @param span the span whose events are exported
 * @param samplingPercentage the sample rate to stamp on each generated item
 * @param telemetryItems the output list the generated telemetry is added to
 */
private void exportEvents(SpanData span, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    boolean foundException = false;
    for (EventData event : span.getEvents()) {
        TelemetryItem telemetryItem = new TelemetryItem();
        TelemetryEventData eventData = new TelemetryEventData();
        MonitorBase monitorBase = new MonitorBase();
        telemetryItem.setTags(new HashMap<>());
        telemetryItem.setName(telemetryItemNamePrefix + "Event");
        telemetryItem.setVersion(1);
        telemetryItem.setInstrumentationKey(instrumentationKey);
        telemetryItem.setData(monitorBase);
        eventData.setProperties(new HashMap<>());
        eventData.setVersion(2);
        monitorBase.setBaseType("EventData");
        monitorBase.setBaseData(eventData);
        eventData.setName(event.getName());
        String operationId = span.getTraceId();
        telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
        telemetryItem.getTags()
            .put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), span.getParentSpanId());
        telemetryItem.setTime(getFormattedTime(event.getEpochNanos()));
        addExtraAttributes(eventData.getProperties(), event.getAttributes());
        if (event.getAttributes().get(SemanticAttributes.EXCEPTION_TYPE) != null
            || event.getAttributes().get(SemanticAttributes.EXCEPTION_MESSAGE) != null) {
            // Only the first exception event per span is turned into exception telemetry,
            // and only when it carries a stack trace; the event item built above is dropped.
            if (!foundException) {
                Object stacktrace = event.getAttributes()
                    .get(SemanticAttributes.EXCEPTION_STACKTRACE);
                if (stacktrace != null) {
                    trackException(stacktrace.toString(), span, operationId,
                        span.getSpanId(), samplingPercentage, telemetryItems);
                }
            }
            foundException = true;
        } else {
            telemetryItem.setSampleRate(samplingPercentage.floatValue());
            telemetryItems.add(telemetryItem);
        }
    }
}
/**
 * Builds an "Exception" telemetry item from a raw stack-trace string and appends it to the output.
 *
 * @param errorStack the exception stack trace text, parsed via {@code minimalParse}
 * @param span the span the exception belongs to (its end time becomes the item time)
 * @param operationId the trace id used as the AI operation id
 * @param id the span id used as the AI operation parent id
 * @param samplingPercentage the sample rate to stamp on the item
 * @param telemetryItems the output list the generated telemetry is added to
 */
private void trackException(String errorStack, SpanData span, String operationId,
    String id, Double samplingPercentage, List<TelemetryItem> telemetryItems) {
    TelemetryItem telemetryItem = new TelemetryItem();
    TelemetryExceptionData exceptionData = new TelemetryExceptionData();
    MonitorBase monitorBase = new MonitorBase();
    telemetryItem.setTags(new HashMap<>());
    telemetryItem.setName(telemetryItemNamePrefix + "Exception");
    telemetryItem.setVersion(1);
    telemetryItem.setInstrumentationKey(instrumentationKey);
    telemetryItem.setData(monitorBase);
    exceptionData.setProperties(new HashMap<>());
    exceptionData.setVersion(2);
    monitorBase.setBaseType("ExceptionData");
    monitorBase.setBaseData(exceptionData);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_ID.toString(), operationId);
    telemetryItem.getTags().put(ContextTagKeys.AI_OPERATION_PARENT_ID.toString(), id);
    telemetryItem.setTime(getFormattedTime(span.getEndEpochNanos()));
    telemetryItem.setSampleRate(samplingPercentage.floatValue());
    exceptionData.setExceptions(minimalParse(errorStack));
    telemetryItems.add(telemetryItem);
}
/**
 * Formats a duration as {@code days.hours:minutes:seconds.millis}.
 *
 * <p>Bug fix: the previous implementation concatenated {@code toHours()}, {@code toMinutes()},
 * {@code getSeconds()} and {@code toMillis()}, which are TOTALS over the whole duration, not
 * field values — e.g. 61 seconds rendered as {@code "0.0:1:61.61000"}. Each component is now
 * reduced modulo the next-larger unit, so 61 seconds renders as {@code "0.1:1.0"}... i.e.
 * {@code "0.0:1:1.0"}.
 *
 * @param duration the (non-negative) duration to format
 * @return the formatted duration string
 */
private static String getFormattedDuration(Duration duration) {
    long days = duration.toDays();
    long hours = duration.toHours() % 24;       // hours within the last day
    long minutes = duration.toMinutes() % 60;   // minutes within the last hour
    long seconds = duration.getSeconds() % 60;  // seconds within the last minute
    long millis = duration.toMillis() % 1000;   // millis within the last second
    return days + "." + hours + ":" + minutes + ":" + seconds + "." + millis;
}
/**
 * Formats an epoch timestamp given in nanoseconds as an ISO-8601 date-time string in UTC.
 * Precision is truncated to milliseconds before formatting.
 *
 * @param epochNanos nanoseconds since the Unix epoch
 * @return the ISO-8601 formatted UTC timestamp
 */
private static String getFormattedTime(long epochNanos) {
    long epochMillis = NANOSECONDS.toMillis(epochNanos);
    return DateTimeFormatter.ISO_DATE_TIME.format(
        Instant.ofEpochMilli(epochMillis).atOffset(ZoneOffset.UTC));
}
/**
 * Serializes the span links as a compact JSON array and stores it under the
 * "_MS.links" property. Does nothing when there are no links.
 *
 * @param properties the telemetry properties map to write into
 * @param links the span links to serialize
 */
private static void addLinks(Map<String, String> properties, List<LinkData> links) {
    if (links.isEmpty()) {
        return;
    }
    StringBuilder linksJson = new StringBuilder("[");
    String separator = "";
    for (LinkData link : links) {
        linksJson.append(separator)
            .append("{\"operation_Id\":\"")
            .append(link.getSpanContext().getTraceIdAsHexString())
            .append("\",\"id\":\"")
            .append(link.getSpanContext().getSpanIdAsHexString())
            .append("\"}");
        separator = ",";
    }
    linksJson.append("]");
    properties.put("_MS.links", linksJson.toString());
}
/**
 * Reads a string attribute by name, returning null when the attribute is absent or not a string.
 *
 * <p>NOTE(review): despite the "remove" in the name, this method only reads the attribute —
 * {@code Attributes} is not mutated here; confirm callers do not rely on removal.
 *
 * @param attributes the attributes to read from
 * @param attributeName the attribute key name
 * @return the attribute's string value, or null
 */
private static String removeAttributeString(Attributes attributes, String attributeName) {
    Object value = attributes.get(AttributeKey.stringKey(attributeName));
    return value instanceof String ? (String) value : null;
}
/**
 * Derives a dependency target from a URI: the host, plus ":port" for any explicit
 * non-default port (80 and 443 are treated as default and omitted, as is -1 = unspecified).
 *
 * @param uriObject the parsed request URI
 * @return the target string, e.g. "example.com" or "example.com:8080"
 */
private static String createTarget(URI uriObject) {
    int port = uriObject.getPort();
    boolean isDefaultPort = port == 80 || port == 443 || port == -1;
    return isDefaultPort ? uriObject.getHost() : uriObject.getHost() + ":" + port;
}
/**
 * Converts an attribute value to its string form: scalars via {@code String.valueOf},
 * array types via {@link #join}; unknown types yield null.
 *
 * @param attributeKey the attribute key, used for its declared type
 * @param value the attribute value
 * @return the string form, or null for unrecognized types
 */
private static String getStringValue(AttributeKey<?> attributeKey, Object value) {
    switch (attributeKey.getType()) {
        case STRING_ARRAY:
        case BOOLEAN_ARRAY:
        case LONG_ARRAY:
        case DOUBLE_ARRAY:
            // Array-valued attributes are rendered as a comma-separated list.
            return join((List<?>) value);
        case STRING:
        case BOOLEAN:
        case LONG:
        case DOUBLE:
            return String.valueOf(value);
        default:
            return null;
    }
}
/**
 * Joins the list elements into a single comma-and-space separated string.
 * A null or empty list yields the empty string.
 *
 * @param values the elements to join
 * @return the joined string, possibly empty
 */
private static <T> String join(List<T> values) {
    if (CoreUtils.isNullOrEmpty(values)) {
        return "";
    }
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < values.size(); i++) {
        if (i > 0) {
            joined.append(", ");
        }
        joined.append(values.get(i));
    }
    return joined.toString();
}
/**
 * Copies every attribute whose value has a string representation (see
 * {@link #getStringValue}) into the telemetry properties map, keyed by attribute name.
 *
 * @param properties the destination properties map
 * @param attributes the span or event attributes to copy
 */
private static void addExtraAttributes(final Map<String, String> properties, Attributes attributes) {
    attributes.forEach((key, value) -> {
        String stringValue = getStringValue(key, value);
        if (stringValue == null) {
            // Unrepresentable attribute types are skipped.
            return;
        }
        properties.put(key.toString(), stringValue);
    });
}
} |
Why are some checks performed outside the `checkAndPrepareRestore` method, while other checks that immediately return are done inside it? Consolidating them in one place would make the control flow easier to follow.
RocksDBKeyedStateBackend<K> backend = null;
RocksDBWriteBatchWrapper writeBatchWrapper = null;
ColumnFamilyHandle defaultColumnFamilyHandle = null;
RocksDBNativeMetricMonitor nativeMetricMonitor = null;
try {
prepareDirectories();
RocksDBIncrementalRestorePrepareResult restorePrepareResult = null;
if (restoreStateHandles != null && !restoreStateHandles.isEmpty()) {
restorePrepareResult = checkAndPrepareRestore();
}
if (db == null) {
db = RocksDBOperationUtils.openDB(dbPath, columnFamilyDescriptors,
columnFamilyHandles, columnFamilyOptions, dbOptions);
defaultColumnFamilyHandle = columnFamilyHandles.remove(0);
}
writeBatchWrapper = new RocksDBWriteBatchWrapper(db, writeOptions);
initializeSnapshotStrategy();
if (nativeMetricOptions.isEnabled()) {
nativeMetricMonitor = new RocksDBNativeMetricMonitor(db, nativeMetricOptions, metricGroup);
}
backend = new RocksDBKeyedStateBackend<>(
this.operatorIdentifier,
this.userCodeClassLoader,
this.instanceBasePath,
this.dbOptions,
this.columnFamilyOptions,
this.kvStateRegistry,
this.keySerializer,
this.numberOfKeyGroups,
this.keyGroupRange,
this.executionConfig,
this.numberOfTransferingThreads,
this.ttlTimeProvider,
this.db,
this.kvStateInformation,
this.keyGroupPrefixBytes,
this.cancelStreamRegistry,
this.keyGroupCompressionDecorator,
this.rocksDBResourceGuard,
this.checkpointSnapshotStrategy,
this.savepointSnapshotStrategy,
writeBatchWrapper,
defaultColumnFamilyHandle,
nativeMetricMonitor);
PriorityQueueSetFactory priorityQueueFactory = initPriorityQueueFactory(backend);
backend.setPriorityQueueFactory(priorityQueueFactory);
if (restorePrepareResult != null) {
postBackendInitialization(backend, restorePrepareResult);
}
return backend;
} catch (Throwable e) {
if (backend != null) {
backend.dispose();
} else {
IOUtils.closeQuietly(defaultColumnFamilyHandle);
IOUtils.closeQuietly(writeBatchWrapper);
IOUtils.closeQuietly(db);
IOUtils.closeQuietly(cancelStreamRegistry);
}
if (e instanceof IOException) {
throw e;
} else {
String errMsg = "Caught unexpected exception.";
LOG.error(errMsg, e);
throw new IOException(errMsg, e);
}
}
} | if (restoreStateHandles != null && !restoreStateHandles.isEmpty()) { | public RocksDBKeyedStateBackend<K> build() throws BackendBuildingException {
RocksDBWriteBatchWrapper writeBatchWrapper = null;
ColumnFamilyHandle defaultColumnFamilyHandle = null;
RocksDBNativeMetricMonitor nativeMetricMonitor = null;
CloseableRegistry cancelStreamRegistry = new CloseableRegistry();
WriteOptions writeOptions = new WriteOptions().setDisableWAL(true);
LinkedHashMap<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation = new LinkedHashMap<>();
RocksDB db = null;
AbstractRocksDBRestoreOperation restoreOperation = null;
RocksDbTtlCompactFiltersManager ttlCompactFiltersManager =
new RocksDbTtlCompactFiltersManager(enableTtlCompactionFilter);
ResourceGuard rocksDBResourceGuard = new ResourceGuard();
SnapshotStrategy<K> snapshotStrategy;
PriorityQueueSetFactory priorityQueueFactory;
RocksDBSerializedCompositeKeyBuilder<K> sharedRocksKeyBuilder;
int keyGroupPrefixBytes = RocksDBKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(numberOfKeyGroups);
try {
UUID backendUID = UUID.randomUUID();
SortedMap<Long, Set<StateHandleID>> materializedSstFiles = new TreeMap<>();
long lastCompletedCheckpointId = -1L;
if (injectedTestDB != null) {
db = injectedTestDB;
defaultColumnFamilyHandle = injectedDefaultColumnFamilyHandle;
nativeMetricMonitor = nativeMetricOptions.isEnabled() ?
new RocksDBNativeMetricMonitor(nativeMetricOptions, metricGroup, db) : null;
} else {
prepareDirectories();
restoreOperation = getRocksDBRestoreOperation(
keyGroupPrefixBytes, cancelStreamRegistry, kvStateInformation, ttlCompactFiltersManager);
RocksDBRestoreResult restoreResult = restoreOperation.restore();
db = restoreResult.getDb();
defaultColumnFamilyHandle = restoreResult.getDefaultColumnFamilyHandle();
nativeMetricMonitor = restoreResult.getNativeMetricMonitor();
if (restoreOperation instanceof RocksDBIncrementalRestoreOperation) {
backendUID = restoreResult.getBackendUID();
materializedSstFiles = restoreResult.getRestoredSstFiles();
lastCompletedCheckpointId = restoreResult.getLastCompletedCheckpointId();
}
}
writeBatchWrapper = new RocksDBWriteBatchWrapper(db, writeOptions);
sharedRocksKeyBuilder = new RocksDBSerializedCompositeKeyBuilder<>(
keySerializerProvider.currentSchemaSerializer(),
keyGroupPrefixBytes,
32);
snapshotStrategy = initializeSavepointAndCheckpointStrategies(cancelStreamRegistry, rocksDBResourceGuard,
kvStateInformation, keyGroupPrefixBytes, db, backendUID, materializedSstFiles, lastCompletedCheckpointId);
priorityQueueFactory = initPriorityQueueFactory(keyGroupPrefixBytes, kvStateInformation, db,
writeBatchWrapper, nativeMetricMonitor);
} catch (Throwable e) {
List<ColumnFamilyOptions> columnFamilyOptions = new ArrayList<>(kvStateInformation.values().size());
IOUtils.closeQuietly(cancelStreamRegistry);
IOUtils.closeQuietly(writeBatchWrapper);
RocksDBOperationUtils.addColumnFamilyOptionsToCloseLater(columnFamilyOptions, defaultColumnFamilyHandle);
IOUtils.closeQuietly(defaultColumnFamilyHandle);
IOUtils.closeQuietly(nativeMetricMonitor);
for (RocksDBKeyedStateBackend.RocksDbKvStateInfo kvStateInfo : kvStateInformation.values()) {
RocksDBOperationUtils.addColumnFamilyOptionsToCloseLater(columnFamilyOptions, kvStateInfo.columnFamilyHandle);
IOUtils.closeQuietly(kvStateInfo.columnFamilyHandle);
}
IOUtils.closeQuietly(db);
IOUtils.closeQuietly(restoreOperation);
IOUtils.closeAllQuietly(columnFamilyOptions);
IOUtils.closeQuietly(dbOptions);
IOUtils.closeQuietly(writeOptions);
ttlCompactFiltersManager.disposeAndClearRegisteredCompactionFactories();
kvStateInformation.clear();
try {
FileUtils.deleteDirectory(instanceBasePath);
} catch (Exception ex) {
LOG.warn("Failed to instance base path for RocksDB: " + instanceBasePath, ex);
}
if (e instanceof BackendBuildingException) {
throw (BackendBuildingException) e;
} else {
String errMsg = "Caught unexpected exception.";
LOG.error(errMsg, e);
throw new BackendBuildingException(errMsg, e);
}
}
return new RocksDBKeyedStateBackend<>(
this.operatorIdentifier,
this.userCodeClassLoader,
this.instanceBasePath,
this.dbOptions,
columnFamilyOptionsFactory,
this.kvStateRegistry,
this.keySerializerProvider,
this.numberOfKeyGroups,
this.keyGroupRange,
this.executionConfig,
this.numberOfTransferingThreads,
this.ttlTimeProvider,
db,
kvStateInformation,
keyGroupPrefixBytes,
cancelStreamRegistry,
this.keyGroupCompressionDecorator,
rocksDBResourceGuard,
snapshotStrategy.checkpointSnapshotStrategy,
snapshotStrategy.savepointSnapshotStrategy,
writeBatchWrapper,
defaultColumnFamilyHandle,
nativeMetricMonitor,
sharedRocksKeyBuilder,
priorityQueueFactory,
ttlCompactFiltersManager
);
} | class RocksDBKeyedStateBackendBuilder<K> extends AbstractKeyedStateBackendBuilder<K> {
private static final Logger LOG = LoggerFactory.getLogger(RocksDBKeyedStateBackendBuilder.class);
public static final String DB_INSTANCE_DIR_STRING = "db";
/** String that identifies the operator that owns this backend. */
private final String operatorIdentifier;
private final RocksDBStateBackend.PriorityQueueStateType priorityQueueStateType;
/** The configuration of local recovery. */
private final LocalRecoveryConfig localRecoveryConfig;
/** The column family options from the options factory. */
private final ColumnFamilyOptions columnFamilyOptions;
/** The DB options from the options factory. */
private final DBOptions dbOptions;
/** Path where this configured instance stores its data directory. */
private final File instanceBasePath;
/** Path where this configured instance stores its RocksDB database. */
private final File instanceRocksDBPath;
/** The write options to use in the states. We disable write ahead logging. */
private final WriteOptions writeOptions;
/**
* Information about the k/v states, maintained in the order as we create them. This is used to retrieve the
* column family that is used for a state and also for sanity checks when restoring.
*/
private final LinkedHashMap<String, StateColumnFamilyHandle> kvStateInformation;
/**
* Number of bytes required to prefix the key groups.
*/
private final int keyGroupPrefixBytes;
private final MetricGroup metricGroup;
private final CloseableRegistry cancelStreamRegistry;
/** True if incremental checkpointing is enabled. */
private boolean enableIncrementalCheckpointing = false;
private RocksDBNativeMetricOptions nativeMetricOptions = new RocksDBNativeMetricOptions();
private int numberOfTransferingThreads = RocksDBOptions.CHECKPOINT_TRANSFER_THREAD_NUM.defaultValue();
private RocksDB db;
private List<ColumnFamilyHandle> columnFamilyHandles = new ArrayList<>(1);
private List<ColumnFamilyDescriptor> columnFamilyDescriptors = Collections.emptyList();
private final String dbPath;
private final ResourceGuard rocksDBResourceGuard;
private RocksDBSnapshotStrategyBase<K> checkpointSnapshotStrategy;
private RocksDBSnapshotStrategyBase<K> savepointSnapshotStrategy;
private UUID backendUID = UUID.randomUUID();
private SortedMap<Long, Set<StateHandleID>> materializedSstFiles = new TreeMap<>();
private long lastCompletedCheckpointId = -1L;
/**
 * Creates a builder for a RocksDB keyed state backend.
 *
 * @param operatorIdentifier identifies the operator owning the backend
 * @param userCodeClassLoader class loader for user-code (de)serialization
 * @param instanceBasePath base directory for this backend instance's data
 * @param dbOptions RocksDB DB options from the options factory
 * @param columnFamilyOptions RocksDB column family options; the merge operator is forced here
 * @param kvStateRegistry registry for queryable k/v state
 * @param keySerializer serializer for the backend's keys
 * @param numberOfKeyGroups max parallelism / number of key groups
 * @param keyGroupRange key-group range assigned to this backend
 * @param executionConfig the job's execution config
 * @param localRecoveryConfig configuration for local recovery
 * @param priorityQueueStateType where timer/priority-queue state lives (heap vs. RocksDB)
 * @param ttlTimeProvider time provider for state TTL
 * @param metricGroup metric group for native RocksDB metrics
 */
public RocksDBKeyedStateBackendBuilder(
    String operatorIdentifier,
    ClassLoader userCodeClassLoader,
    File instanceBasePath,
    DBOptions dbOptions,
    ColumnFamilyOptions columnFamilyOptions,
    TaskKvStateRegistry kvStateRegistry,
    TypeSerializer<K> keySerializer,
    int numberOfKeyGroups,
    KeyGroupRange keyGroupRange,
    ExecutionConfig executionConfig,
    LocalRecoveryConfig localRecoveryConfig,
    RocksDBStateBackend.PriorityQueueStateType priorityQueueStateType,
    TtlTimeProvider ttlTimeProvider,
    MetricGroup metricGroup) {
    super(kvStateRegistry, keySerializer, userCodeClassLoader,
        numberOfKeyGroups, keyGroupRange, executionConfig, ttlTimeProvider);
    this.operatorIdentifier = operatorIdentifier;
    this.priorityQueueStateType = priorityQueueStateType;
    this.localRecoveryConfig = localRecoveryConfig;
    // The merge operator is mandatory for list-state appends.
    this.columnFamilyOptions = Preconditions.checkNotNull(columnFamilyOptions)
        .setMergeOperatorName(MERGE_OPERATOR_NAME);
    this.dbOptions = dbOptions;
    this.instanceBasePath = instanceBasePath;
    this.kvStateInformation = new LinkedHashMap<>();
    this.keyGroupPrefixBytes =
        RocksDBKeySerializationUtils.computeRequiredBytesInKeyGroupPrefix(numberOfKeyGroups);
    // WAL is disabled: durability comes from checkpoints, not RocksDB's log.
    this.writeOptions = new WriteOptions().setDisableWAL(true);
    this.instanceRocksDBPath = new File(instanceBasePath, DB_INSTANCE_DIR_STRING);
    this.dbPath = instanceRocksDBPath.getAbsolutePath();
    this.rocksDBResourceGuard = new ResourceGuard();
    this.metricGroup = metricGroup;
    this.cancelStreamRegistry = new CloseableRegistry();
}
/**
 * Sets the state handles to restore this backend from.
 *
 * @param restoreStateHandles the handles to restore from; may be null or empty for a fresh start
 * @return this builder
 */
@Override
public RocksDBKeyedStateBackendBuilder<K> setRestoreStateHandles(Collection<KeyedStateHandle> restoreStateHandles) {
    this.restoreStateHandles = restoreStateHandles;
    return this;
}
/**
 * Enables or disables incremental checkpointing.
 *
 * @param enableIncrementalCheckpointing true to snapshot incrementally
 * @return this builder
 */
public RocksDBKeyedStateBackendBuilder<K> setEnableIncrementalCheckpointing(boolean enableIncrementalCheckpointing) {
    this.enableIncrementalCheckpointing = enableIncrementalCheckpointing;
    return this;
}
/**
 * Sets the options controlling which native RocksDB metrics are exposed.
 *
 * @param nativeMetricOptions the native metric options
 * @return this builder
 */
public RocksDBKeyedStateBackendBuilder<K> setNativeMetricOptions(RocksDBNativeMetricOptions nativeMetricOptions) {
    this.nativeMetricOptions = nativeMetricOptions;
    return this;
}
/**
 * Sets the number of threads used to transfer checkpoint files.
 *
 * @param numberOfTransferingThreads the thread count for state transfer
 * @return this builder
 */
public RocksDBKeyedStateBackendBuilder<K> setNumberOfTransferingThreads(int numberOfTransferingThreads) {
    this.numberOfTransferingThreads = numberOfTransferingThreads;
    return this;
}
/**
 * Injects an already-opened RocksDB instance; when set, {@code build()} skips opening one.
 *
 * @param db the RocksDB instance to use
 * @return this builder
 */
public RocksDBKeyedStateBackendBuilder<K> setDb(RocksDB db) {
    this.db = db;
    return this;
}
/**
 * Ensures the given path exists and is a directory, creating it (including parents) if absent.
 *
 * @param directory the directory to verify or create
 * @throws IOException if the path exists but is not a directory, or creation fails
 */
private static void checkAndCreateDirectory(File directory) throws IOException {
    if (!directory.exists()) {
        if (!directory.mkdirs()) {
            throw new IOException(String.format("Could not create RocksDB data directory at %s.", directory));
        }
    } else if (!directory.isDirectory()) {
        throw new IOException("Not a directory: " + directory);
    }
}
/**
 * Initializes the snapshot strategies: savepoints always use the full-snapshot strategy;
 * checkpoints use the incremental strategy only when incremental checkpointing is enabled,
 * otherwise they share the full-snapshot strategy.
 */
private void initializeSnapshotStrategy() {
    this.savepointSnapshotStrategy = new RocksFullSnapshotStrategy<>(
        db,
        rocksDBResourceGuard,
        keySerializer,
        kvStateInformation,
        keyGroupRange,
        keyGroupPrefixBytes,
        localRecoveryConfig,
        cancelStreamRegistry,
        keyGroupCompressionDecorator);
    if (enableIncrementalCheckpointing) {
        checkpointSnapshotStrategy = new RocksIncrementalSnapshotStrategy<>(
            db,
            rocksDBResourceGuard,
            keySerializer,
            kvStateInformation,
            keyGroupRange,
            keyGroupPrefixBytes,
            localRecoveryConfig,
            cancelStreamRegistry,
            instanceBasePath,
            backendUID,
            materializedSstFiles,
            lastCompletedCheckpointId,
            numberOfTransferingThreads);
    } else {
        // No incremental checkpoints: both snapshot kinds use the same full strategy.
        checkpointSnapshotStrategy = savepointSnapshotStrategy;
    }
}
/**
 * Creates the priority-queue (timer) state factory matching the configured state type.
 *
 * @param backend the backend a RocksDB-based factory would be bound to
 * @return the factory for priority queue sets
 * @throws IllegalArgumentException for an unknown priority queue state type
 */
private PriorityQueueSetFactory initPriorityQueueFactory(RocksDBKeyedStateBackend<K> backend) {
    PriorityQueueSetFactory priorityQueueFactory;
    switch (priorityQueueStateType) {
        case HEAP:
            // Heap-based timers: kept on the JVM heap, capacity hint 128 per key group.
            priorityQueueFactory = new HeapPriorityQueueSetFactory(keyGroupRange, numberOfKeyGroups, 128);
            break;
        case ROCKSDB:
            priorityQueueFactory = new RocksDBPriorityQueueSetFactory(backend);
            break;
        default:
            throw new IllegalArgumentException("Unknown priority queue state type: " + priorityQueueStateType);
    }
    return priorityQueueFactory;
}
/**
 * Completes an incremental restore after the backend object exists: clips the DB to the
 * assigned key-group range (rescaling case) or records the restored SST files (non-rescaling
 * case), registers the restored column families, and validates key-serializer compatibility.
 * The backend is disposed before any exception is rethrown.
 *
 * @param backend the freshly constructed backend
 * @param restorePrepareResult the result of the restore preparation phase
 * @throws IOException if clipping, CF registration, or serializer compatibility fails
 */
private void postBackendInitialization(
    RocksDBKeyedStateBackend<K> backend,
    RocksDBIncrementalRestorePrepareResult restorePrepareResult) throws IOException {
    KeyGroupRange initialHandleKeyGroupRange = restorePrepareResult.getInitialHandleKeyGroupRange();
    if (initialHandleKeyGroupRange != null) {
        // Rescaling: remove key groups outside this backend's assigned range.
        try {
            RocksDBIncrementalCheckpointUtils.clipDBWithKeyGroupRange(
                db,
                columnFamilyHandles,
                keyGroupRange,
                initialHandleKeyGroupRange,
                backend.getKeyGroupPrefixBytes());
        } catch (RocksDBException e) {
            backend.dispose();
            String errMsg = "Failed to clip DB after initialization.";
            LOG.error(errMsg, e);
            throw new IOException(errMsg, e);
        }
    } else {
        // Non-rescaling restore: remember restored SST files so the next incremental
        // checkpoint can reference them instead of re-uploading.
        IncrementalLocalKeyedStateHandle restoreStateHandle = restorePrepareResult.getLocalKeyedStateHandle();
        materializedSstFiles.put(
            restoreStateHandle.getCheckpointId(),
            restoreStateHandle.getSharedStateHandleIDs());
        lastCompletedCheckpointId = restoreStateHandle.getCheckpointId();
    }
    List<StateMetaInfoSnapshot> metaInfoSnapshots = restorePrepareResult.getStateMetaInfoSnapshots();
    // Descriptor, handle, and meta-info lists are index-aligned.
    for (int i = 0; i < metaInfoSnapshots.size(); ++i) {
        try {
            RocksDBIncrementalRestoreOperation.getOrRegisterColumnFamilyHandle(
                backend,
                columnFamilyDescriptors.get(i),
                columnFamilyHandles.get(i),
                metaInfoSnapshots.get(i));
        } catch (RocksDBException e) {
            backend.dispose();
            String errMsg = "Failed to register CF handle.";
            LOG.error(errMsg, e);
            throw new IOException(errMsg, e);
        }
    }
    TypeSerializerSchemaCompatibility keySerializerSchemaCompat =
        backend.checkKeySerializerSchemaCompatibility(restorePrepareResult.getSerializerSnapshot());
    // Key serializers must match as-is: neither migration nor incompatibility is allowed.
    if (keySerializerSchemaCompat.isCompatibleAfterMigration() || keySerializerSchemaCompat.isIncompatible()) {
        backend.dispose();
        String errMsg = "The new key serializer must be compatible.";
        StateMigrationException cause = new StateMigrationException(errMsg);
        LOG.error(errMsg, cause);
        throw new IOException(cause);
    }
}
/**
 * Prepares the on-disk layout: ensures the instance base directory exists, and wipes it when
 * a leftover RocksDB directory is found.
 *
 * <p>NOTE(review): when {@code instanceRocksDBPath} exists, the WHOLE base path is deleted,
 * not just the db subdirectory — presumably to clear stale state from a crashed predecessor;
 * confirm that downstream code (e.g. DB open) recreates the needed directories.
 *
 * @throws IOException if the base directory cannot be created or deleted
 */
private void prepareDirectories() throws IOException {
    checkAndCreateDirectory(instanceBasePath);
    if (instanceRocksDBPath.exists()) {
        FileUtils.deleteDirectory(instanceBasePath);
    }
}
/**
 * Prepares an incremental restore, distinguishing the non-rescaling case (single handle with
 * a matching key-group range: restore the instance directory in place) from the rescaling case
 * (pick the best-fitting handle as the initial DB; the remaining handles are merged later).
 * Returns null for non-incremental handles; may also return null in the rescaling case when no
 * suitable initial handle is found.
 *
 * @return the prepared restore result, or null (see above)
 * @throws IOException if file preparation or the directory restore fails
 */
private RocksDBIncrementalRestorePrepareResult checkAndPrepareRestore() throws IOException {
    KeyedStateHandle firstStateHandle = restoreStateHandles.iterator().next();
    boolean isIncrementalStateHandle = (firstStateHandle instanceof IncrementalKeyedStateHandle)
        || (firstStateHandle instanceof IncrementalLocalKeyedStateHandle);
    if (!isIncrementalStateHandle) {
        // Non-incremental (full) restore is handled elsewhere.
        return null;
    }
    RocksDBIncrementalRestorePrepareResult prepareResult = null;
    Path instancePath = new Path(dbPath);
    // Rescaling when there are multiple handles or the handle's range differs from ours.
    boolean isRescaling = (restoreStateHandles.size() > 1 ||
        !Objects.equals(firstStateHandle.getKeyGroupRange(), keyGroupRange));
    if (!isRescaling) {
        // Non-rescaling: stage files in a temp dir, then move them into the instance dir.
        Path temporaryRestoreInstancePath = new Path(
            instanceBasePath.getAbsolutePath(),
            UUID.randomUUID().toString());
        prepareResult = prepareFiles(firstStateHandle, temporaryRestoreInstancePath);
        Path restoreSourcePath = prepareResult.getLocalKeyedStateHandle().getDirectoryStateHandle().getDirectory();
        // Keep the original backend UID so shared-state registration stays consistent.
        if (firstStateHandle instanceof IncrementalKeyedStateHandle) {
            backendUID = ((IncrementalKeyedStateHandle) firstStateHandle).getBackendIdentifier();
        } else {
            backendUID = ((IncrementalLocalKeyedStateHandle) firstStateHandle).getBackendIdentifier();
        }
        LOG.debug("Restoring keyed backend uid in operator {} from incremental snapshot to {}.",
            this.operatorIdentifier, this.backendUID);
        if (!instanceRocksDBPath.mkdirs()) {
            throw new IOException("Could not create RocksDB data directory.");
        }
        try {
            RocksDBIncrementalRestoreOperation.restoreInstanceDirectoryFromPath(restoreSourcePath, dbPath);
        } finally {
            // Always clean up the staging directory, even on failure.
            FileSystem restoreFileSystem = restoreSourcePath.getFileSystem();
            if (restoreFileSystem.exists(restoreSourcePath)) {
                restoreFileSystem.delete(restoreSourcePath, true);
            }
        }
    } else {
        // Rescaling: seed the DB from the handle with the best key-group overlap.
        IncrementalKeyedStateHandle initialHandle =
            (IncrementalKeyedStateHandle) RocksDBIncrementalCheckpointUtils.chooseTheBestStateHandleForInitial(
                restoreStateHandles, keyGroupRange);
        if (initialHandle != null) {
            restoreStateHandles.remove(initialHandle);
            prepareResult = prepareFiles(initialHandle, instancePath);
            prepareResult.setInitialHandleKeyGroupRange(initialHandle.getKeyGroupRange());
        }
    }
    return prepareResult;
}
/**
 * Downloads/prepares the files of one incremental state handle into the given directory and
 * captures the resulting column-family descriptors. The target directory is cleaned up if
 * preparation fails.
 *
 * @param stateHandle the incremental state handle to prepare
 * @param restoreInstancePath the directory to materialize the handle's files into
 * @return the prepare result with descriptors and local handle
 * @throws IOException if preparation fails (after best-effort cleanup)
 */
private RocksDBIncrementalRestorePrepareResult prepareFiles(
    KeyedStateHandle stateHandle,
    Path restoreInstancePath) throws IOException {
    RocksDBIncrementalRestorePrepareResult prepareResult;
    try {
        prepareResult = RocksDBIncrementalRestoreOperation.prepareFiles(
            stateHandle,
            restoreInstancePath,
            numberOfTransferingThreads,
            cancelStreamRegistry,
            userCodeClassLoader,
            columnFamilyOptions
        );
    } catch (Exception e) {
        // Remove partially transferred files before propagating the failure.
        FileSystem restoreFileSystem = restoreInstancePath.getFileSystem();
        if (restoreFileSystem.exists(restoreInstancePath)) {
            restoreFileSystem.delete(restoreInstancePath, true);
        }
        String errMsg = "Failed to prepare files for restore from incremental state handle.";
        LOG.error(errMsg, e);
        throw new IOException(errMsg, e);
    }
    columnFamilyDescriptors = prepareResult.getColumnFamilyDescriptors();
    // Reserve one extra slot for the default column family handle.
    columnFamilyHandles =
        new ArrayList<>(columnFamilyDescriptors.size() + 1);
    return prepareResult;
}
} | class RocksDBKeyedStateBackendBuilder<K> extends AbstractKeyedStateBackendBuilder<K> {
private static final Logger LOG = LoggerFactory.getLogger(RocksDBKeyedStateBackendBuilder.class);
public static final String DB_INSTANCE_DIR_STRING = "db";
/** String that identifies the operator that owns this backend. */
private final String operatorIdentifier;
private final RocksDBStateBackend.PriorityQueueStateType priorityQueueStateType;
/** The configuration of local recovery. */
private final LocalRecoveryConfig localRecoveryConfig;
/** Factory function to create column family options from state name. */
private final Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory;
/** The DB options from the options factory. */
private final DBOptions dbOptions;
/** Path where this configured instance stores its data directory. */
private final File instanceBasePath;
/** Path where this configured instance stores its RocksDB database. */
private final File instanceRocksDBPath;
private final MetricGroup metricGroup;
/** True if incremental checkpointing is enabled. */
private boolean enableIncrementalCheckpointing;
/** True if ttl compaction filter is enabled. */
private boolean enableTtlCompactionFilter;
private RocksDBNativeMetricOptions nativeMetricOptions;
private int numberOfTransferingThreads;
private RocksDB injectedTestDB;
private ColumnFamilyHandle injectedDefaultColumnFamilyHandle;
/**
 * Creates a builder for a RocksDB keyed state backend.
 *
 * @param operatorIdentifier identifies the operator owning the backend
 * @param userCodeClassLoader class loader for user-code (de)serialization
 * @param instanceBasePath base directory for this backend instance's data
 * @param dbOptions RocksDB DB options from the options factory
 * @param columnFamilyOptionsFactory creates column family options per state name
 * @param kvStateRegistry registry for queryable k/v state
 * @param keySerializer serializer for the backend's keys
 * @param numberOfKeyGroups max parallelism / number of key groups
 * @param keyGroupRange key-group range assigned to this backend
 * @param executionConfig the job's execution config
 * @param localRecoveryConfig configuration for local recovery
 * @param priorityQueueStateType where timer/priority-queue state lives (heap vs. RocksDB)
 * @param ttlTimeProvider time provider for state TTL
 * @param metricGroup metric group for native RocksDB metrics
 * @param stateHandles state handles to restore from (never null; may be empty)
 * @param keyGroupCompressionDecorator compression used for key-group snapshot streams
 */
public RocksDBKeyedStateBackendBuilder(
    String operatorIdentifier,
    ClassLoader userCodeClassLoader,
    File instanceBasePath,
    DBOptions dbOptions,
    Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory,
    TaskKvStateRegistry kvStateRegistry,
    TypeSerializer<K> keySerializer,
    int numberOfKeyGroups,
    KeyGroupRange keyGroupRange,
    ExecutionConfig executionConfig,
    LocalRecoveryConfig localRecoveryConfig,
    RocksDBStateBackend.PriorityQueueStateType priorityQueueStateType,
    TtlTimeProvider ttlTimeProvider,
    MetricGroup metricGroup,
    @Nonnull Collection<KeyedStateHandle> stateHandles,
    StreamCompressionDecorator keyGroupCompressionDecorator) {
    super(
        kvStateRegistry,
        keySerializer,
        userCodeClassLoader,
        numberOfKeyGroups,
        keyGroupRange,
        executionConfig,
        ttlTimeProvider,
        stateHandles,
        keyGroupCompressionDecorator
    );
    this.operatorIdentifier = operatorIdentifier;
    this.priorityQueueStateType = priorityQueueStateType;
    this.localRecoveryConfig = localRecoveryConfig;
    this.columnFamilyOptionsFactory = Preconditions.checkNotNull(columnFamilyOptionsFactory);
    this.dbOptions = dbOptions;
    this.instanceBasePath = instanceBasePath;
    this.instanceRocksDBPath = new File(instanceBasePath, DB_INSTANCE_DIR_STRING);
    this.metricGroup = metricGroup;
    // Defaults; overridable via the corresponding setters.
    this.enableIncrementalCheckpointing = false;
    this.nativeMetricOptions = new RocksDBNativeMetricOptions();
    this.numberOfTransferingThreads = RocksDBOptions.CHECKPOINT_TRANSFER_THREAD_NUM.defaultValue();
}
/**
 * Test-only constructor that additionally injects a pre-opened RocksDB instance and its
 * default column family handle, bypassing the normal restore/open path in {@code build()}.
 *
 * @param injectedTestDB the already-opened RocksDB instance to use
 * @param injectedDefaultColumnFamilyHandle the default column family handle of that instance
 */
@VisibleForTesting
RocksDBKeyedStateBackendBuilder(
    String operatorIdentifier,
    ClassLoader userCodeClassLoader,
    File instanceBasePath,
    DBOptions dbOptions,
    Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory,
    TaskKvStateRegistry kvStateRegistry,
    TypeSerializer<K> keySerializer,
    int numberOfKeyGroups,
    KeyGroupRange keyGroupRange,
    ExecutionConfig executionConfig,
    LocalRecoveryConfig localRecoveryConfig,
    RocksDBStateBackend.PriorityQueueStateType priorityQueueStateType,
    TtlTimeProvider ttlTimeProvider,
    MetricGroup metricGroup,
    @Nonnull Collection<KeyedStateHandle> stateHandles,
    StreamCompressionDecorator keyGroupCompressionDecorator,
    RocksDB injectedTestDB,
    ColumnFamilyHandle injectedDefaultColumnFamilyHandle) {
    // Delegate all common initialization to the public constructor.
    this(
        operatorIdentifier,
        userCodeClassLoader,
        instanceBasePath,
        dbOptions,
        columnFamilyOptionsFactory,
        kvStateRegistry,
        keySerializer,
        numberOfKeyGroups,
        keyGroupRange,
        executionConfig,
        localRecoveryConfig,
        priorityQueueStateType,
        ttlTimeProvider,
        metricGroup,
        stateHandles,
        keyGroupCompressionDecorator
    );
    this.injectedTestDB = injectedTestDB;
    this.injectedDefaultColumnFamilyHandle = injectedDefaultColumnFamilyHandle;
}
/**
 * Enables or disables incremental checkpointing.
 *
 * @param enableIncrementalCheckpointing true to snapshot incrementally
 * @return this builder
 */
public RocksDBKeyedStateBackendBuilder<K> setEnableIncrementalCheckpointing(boolean enableIncrementalCheckpointing) {
    this.enableIncrementalCheckpointing = enableIncrementalCheckpointing;
    return this;
}
/**
 * Enables or disables the TTL compaction filter for state cleanup during compaction.
 *
 * @param enableTtlCompactionFilter true to enable the TTL compaction filter
 * @return this builder
 */
public RocksDBKeyedStateBackendBuilder<K> setEnableTtlCompactionFilter (boolean enableTtlCompactionFilter) {
    this.enableTtlCompactionFilter = enableTtlCompactionFilter;
    return this;
}
/** Sets the options controlling which native RocksDB metrics are exposed. */
public RocksDBKeyedStateBackendBuilder<K> setNativeMetricOptions(RocksDBNativeMetricOptions nativeMetricOptions) {
    this.nativeMetricOptions = nativeMetricOptions;
    return this;
}
/**
 * Sets the number of threads used to transfer (download/upload) state files
 * during checkpoint/restore. NOTE: the misspelling "Transfering" is kept
 * because the name is part of the builder's public API.
 */
public RocksDBKeyedStateBackendBuilder<K> setNumberOfTransferingThreads(int numberOfTransferingThreads) {
    this.numberOfTransferingThreads = numberOfTransferingThreads;
    return this;
}
/**
 * Ensures {@code directory} exists and is a directory, creating it (and any
 * missing parents) when absent.
 *
 * @throws IOException if the path exists but is not a directory, or creation fails
 */
private static void checkAndCreateDirectory(File directory) throws IOException {
    if (!directory.exists()) {
        // Missing: try to create the full path; mkdirs() returns false on failure.
        if (!directory.mkdirs()) {
            throw new IOException(String.format("Could not create RocksDB data directory at %s.", directory));
        }
        return;
    }
    // Present: it must actually be a directory, not a regular file.
    if (!directory.isDirectory()) {
        throw new IOException("Not a directory: " + directory);
    }
}
/**
 * Selects the restore operation matching the provided state handles:
 * no handles -> fresh start (none-restore); incremental handles (remote or
 * local) -> incremental restore; anything else -> full restore.
 */
private AbstractRocksDBRestoreOperation<K> getRocksDBRestoreOperation(
int keyGroupPrefixBytes,
CloseableRegistry cancelStreamRegistry,
LinkedHashMap<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
RocksDbTtlCompactFiltersManager ttlCompactFiltersManager) {
// Nothing to restore: just open an empty database.
if (restoreStateHandles.isEmpty()) {
return new RocksDBNoneRestoreOperation<>(
keyGroupRange,
keyGroupPrefixBytes,
numberOfTransferingThreads,
cancelStreamRegistry,
userCodeClassLoader,
kvStateInformation,
keySerializerProvider,
instanceBasePath,
instanceRocksDBPath,
dbOptions,
columnFamilyOptionsFactory,
nativeMetricOptions,
metricGroup,
restoreStateHandles,
ttlCompactFiltersManager,
ttlTimeProvider);
}
// The handle collection is homogeneous, so inspecting the first handle is
// enough to decide between incremental and full restore.
KeyedStateHandle firstStateHandle = restoreStateHandles.iterator().next();
boolean isIncrementalStateHandle = (firstStateHandle instanceof IncrementalKeyedStateHandle)
|| (firstStateHandle instanceof IncrementalLocalKeyedStateHandle);
if (isIncrementalStateHandle) {
return new RocksDBIncrementalRestoreOperation<>(
operatorIdentifier,
keyGroupRange,
keyGroupPrefixBytes,
numberOfTransferingThreads,
cancelStreamRegistry,
userCodeClassLoader,
kvStateInformation,
keySerializerProvider,
instanceBasePath,
instanceRocksDBPath,
dbOptions,
columnFamilyOptionsFactory,
nativeMetricOptions,
metricGroup,
restoreStateHandles,
ttlCompactFiltersManager,
ttlTimeProvider);
} else {
return new RocksDBFullRestoreOperation<>(
keyGroupRange,
keyGroupPrefixBytes,
numberOfTransferingThreads,
cancelStreamRegistry,
userCodeClassLoader,
kvStateInformation,
keySerializerProvider,
instanceBasePath,
instanceRocksDBPath,
dbOptions,
columnFamilyOptionsFactory,
nativeMetricOptions,
metricGroup,
restoreStateHandles,
ttlCompactFiltersManager,
ttlTimeProvider);
}
}
/**
 * Builds the snapshot-strategy pair: savepoints always use a full snapshot;
 * checkpoints use an incremental strategy only when incremental
 * checkpointing is enabled, otherwise they reuse the full-snapshot strategy.
 */
private SnapshotStrategy<K> initializeSavepointAndCheckpointStrategies(
CloseableRegistry cancelStreamRegistry,
ResourceGuard rocksDBResourceGuard,
LinkedHashMap<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
int keyGroupPrefixBytes,
RocksDB db,
UUID backendUID,
SortedMap<Long, Set<StateHandleID>> materializedSstFiles,
long lastCompletedCheckpointId) {
RocksDBSnapshotStrategyBase<K> savepointSnapshotStrategy = new RocksFullSnapshotStrategy<>(
db,
rocksDBResourceGuard,
keySerializer,
kvStateInformation,
keyGroupRange,
keyGroupPrefixBytes,
localRecoveryConfig,
cancelStreamRegistry,
keyGroupCompressionDecorator);
RocksDBSnapshotStrategyBase<K> checkpointSnapshotStrategy;
if (enableIncrementalCheckpointing) {
checkpointSnapshotStrategy = new RocksIncrementalSnapshotStrategy<>(
db,
rocksDBResourceGuard,
keySerializer,
kvStateInformation,
keyGroupRange,
keyGroupPrefixBytes,
localRecoveryConfig,
cancelStreamRegistry,
instanceBasePath,
backendUID,
materializedSstFiles,
lastCompletedCheckpointId,
numberOfTransferingThreads);
} else {
// Incremental checkpointing off: checkpoints take full snapshots too.
checkpointSnapshotStrategy = savepointSnapshotStrategy;
}
return new SnapshotStrategy<>(checkpointSnapshotStrategy, savepointSnapshotStrategy);
}
/** Chooses the timer (priority queue) factory for the configured state type. */
private PriorityQueueSetFactory initPriorityQueueFactory(
        int keyGroupPrefixBytes,
        Map<String, RocksDBKeyedStateBackend.RocksDbKvStateInfo> kvStateInformation,
        RocksDB db,
        RocksDBWriteBatchWrapper writeBatchWrapper,
        RocksDBNativeMetricMonitor nativeMetricMonitor) {
    switch (priorityQueueStateType) {
        case HEAP:
            // Heap-based timers, initial capacity 128 per key group.
            return new HeapPriorityQueueSetFactory(keyGroupRange, numberOfKeyGroups, 128);
        case ROCKSDB:
            // RocksDB-backed timers stored inside the state backend's database.
            return new RocksDBPriorityQueueSetFactory(
                    keyGroupRange,
                    keyGroupPrefixBytes,
                    numberOfKeyGroups,
                    kvStateInformation,
                    db,
                    writeBatchWrapper,
                    nativeMetricMonitor,
                    columnFamilyOptionsFactory);
        default:
            throw new IllegalArgumentException("Unknown priority queue state type: " + priorityQueueStateType);
    }
}
/**
 * Creates the backend's base directory and clears leftover data from a
 * previous (possibly crashed) backend instance.
 * NOTE(review): when the RocksDB sub-directory exists, the WHOLE base
 * directory is deleted (not just the sub-directory) and is not re-created
 * here — presumably a later step recreates what it needs; confirm.
 */
private void prepareDirectories() throws IOException {
    checkAndCreateDirectory(instanceBasePath);
    if (instanceRocksDBPath.exists()) {
        // Stale state left behind means the previous backend never reached
        // its dispose path; wipe everything before reuse.
        FileUtils.deleteDirectory(instanceBasePath);
    }
}
/** Immutable pair of the snapshot strategies used for checkpoints and savepoints. */
static final class SnapshotStrategy<K> {
    // Strategy used for regular checkpoints (incremental or full).
    final RocksDBSnapshotStrategyBase<K> checkpointSnapshotStrategy;
    // Strategy used for savepoints (a full snapshot per this builder's setup).
    final RocksDBSnapshotStrategyBase<K> savepointSnapshotStrategy;
    SnapshotStrategy(RocksDBSnapshotStrategyBase<K> checkpointSnapshotStrategy,
            RocksDBSnapshotStrategyBase<K> savepointSnapshotStrategy) {
        this.checkpointSnapshotStrategy = checkpointSnapshotStrategy;
        this.savepointSnapshotStrategy = savepointSnapshotStrategy;
    }
}
} |
Can be simplified to `mails.getOrDefault(recipient, List.of())`.
if (!mails.containsKey(recipient)) {
return List.of();
}
return mails.get(recipient);
} | return mails.get(recipient); | public List<Mail> inbox(String recipient) {
return mails.getOrDefault(recipient, List.of());
} | class MockMailer implements Mailer {
public final Map<String, List<Mail>> mails = new HashMap<>();
@Override
public void send(Mail mail) {
for (String recipient : mail.recipients()) {
mails.putIfAbsent(recipient, new ArrayList<>());
mails.get(recipient).add(mail);
}
}
@Override
public String user() {
return "user";
}
@Override
public String domain() {
return "domain";
}
/** Returns the list of mails sent to the given recipient. Modifications affect the set of mails stored in this. */
public void discard() {
mails.keySet().forEach(k -> mails.remove(k));
}
} | class MockMailer implements Mailer {
public final Map<String, List<Mail>> mails = new HashMap<>();
public final boolean blackhole;
public MockMailer() {
this(false);
}
MockMailer(boolean blackhole) {
this.blackhole = blackhole;
}
public static MockMailer blackhole() {
return new MockMailer(true);
}
@Override
public void send(Mail mail) {
if (blackhole) {
return;
}
for (String recipient : mail.recipients()) {
mails.putIfAbsent(recipient, new ArrayList<>());
mails.get(recipient).add(mail);
}
}
@Override
public String user() {
return "user";
}
@Override
public String domain() {
return "domain";
}
/** Returns the list of mails sent to the given recipient. Modifications affect the set of mails stored in this. */
public void reset() {
mails.clear();
}
} |
Only `cachedPartitionNames` and `cachedSnapshotId` are assigned when a new IcebergTable is constructed.
writeLock();
try {
this.cachedPartitionNames = cachedPartitionNames;
} finally {
writeUnlock();
}
} | this.cachedPartitionNames = cachedPartitionNames; | public void setCachedPartitionNames(List<String> cachedPartitionNames) {
writeLock();
try {
this.cachedPartitionNames = cachedPartitionNames;
} finally {
writeUnlock();
}
} | class IcebergTable extends Table {
private static final Logger LOG = LogManager.getLogger(IcebergTable.class);
private static final String JSON_KEY_ICEBERG_DB = "database";
private static final String JSON_KEY_ICEBERG_TABLE = "table";
private static final String JSON_KEY_RESOURCE_NAME = "resource";
private static final String JSON_KEY_ICEBERG_PROPERTIES = "icebergProperties";
private static final String PARQUET_FORMAT = "parquet";
private String catalogName;
@SerializedName(value = "dn")
private String remoteDbName;
@SerializedName(value = "tn")
private String remoteTableName;
@SerializedName(value = "rn")
private String resourceName;
@SerializedName(value = "prop")
private Map<String, String> icebergProperties = Maps.newHashMap();
private org.apache.iceberg.Table nativeTable;
private List<Column> partitionColumns;
private Optional<Snapshot> snapshot = Optional.empty();
private long refreshSnapshotTime = -1L;
private long cachedSnapshotId = -1;
private List<String> cachedPartitionNames = Lists.newArrayList();
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
public IcebergTable() {
super(TableType.ICEBERG);
}
public IcebergTable(long id, String srTableName, String catalogName, String resourceName, String remoteDbName,
String remoteTableName, List<Column> schema, org.apache.iceberg.Table nativeTable,
Map<String, String> icebergProperties) {
super(id, srTableName, TableType.ICEBERG, schema);
this.catalogName = catalogName;
this.resourceName = resourceName;
this.remoteDbName = remoteDbName;
this.remoteTableName = remoteTableName;
this.nativeTable = nativeTable;
this.icebergProperties = icebergProperties;
Optional<Snapshot> snapshot = getSnapshot();
this.cachedSnapshotId = snapshot.map(Snapshot::snapshotId).orElse(-1L);
}
public String getCatalogName() {
return catalogName == null ? getResourceMappingCatalogName(resourceName, "iceberg") : catalogName;
}
public String getResourceName() {
return resourceName;
}
public String getRemoteDbName() {
return remoteDbName;
}
public String getRemoteTableName() {
return remoteTableName;
}
/**
 * Returns the table's current snapshot, lazily resolving and caching it on
 * the first call. Empty when the underlying table has no current snapshot.
 */
public Optional<Snapshot> getSnapshot() {
    if (!snapshot.isPresent()) {
        snapshot = Optional.ofNullable(getNativeTable().currentSnapshot());
    }
    return snapshot;
}
public long getCachedSnapshotId() {
return cachedSnapshotId;
}
public List<String> getCachedPartitionNames() {
readLock();
try {
return cachedPartitionNames;
} finally {
readUnlock();
}
}
@Override
public String getUUID() {
if (CatalogMgr.isExternalCatalog(catalogName)) {
return String.join(".", catalogName, remoteDbName, remoteTableName,
((BaseTable) getNativeTable()).operations().current().uuid());
} else {
return Long.toString(id);
}
}
/**
 * Returns the columns backing identity-transform partition fields, lazily
 * computed and cached on first call.
 * NOTE(review): the lazy init is not synchronized — racing callers would
 * each compute an equivalent list, but confirm single-threaded use.
 */
public List<Column> getPartitionColumns() {
    if (partitionColumns == null) {
        // Only identity transforms map 1:1 to table columns; other
        // transforms (bucket, truncate, ...) are intentionally skipped.
        List<PartitionField> identityPartitionFields = this.getNativeTable().spec().fields().stream().
                filter(partitionField -> partitionField.transform().isIdentity()).collect(Collectors.toList());
        partitionColumns = identityPartitionFields.stream().map(partitionField -> getColumn(partitionField.name()))
                .collect(Collectors.toList());
    }
    return partitionColumns;
}
public List<Integer> partitionColumnIndexes() {
List<Column> partitionCols = getPartitionColumns();
return partitionCols.stream().map(col -> fullSchema.indexOf(col)).collect(Collectors.toList());
}
public List<Integer> getSortKeyIndexes() {
List<Integer> indexes = new ArrayList<>();
org.apache.iceberg.Table nativeTable = getNativeTable();
List<Types.NestedField> fields = nativeTable.schema().asStruct().fields();
List<Integer> sortFieldSourceIds = nativeTable.sortOrder().fields().stream()
.map(SortField::sourceId)
.collect(Collectors.toList());
for (int i = 0; i < fields.size(); i++) {
Types.NestedField field = fields.get(i);
if (sortFieldSourceIds.contains(field.fieldId())) {
indexes.add(i);
}
}
return indexes;
}
/** Returns true when the table has no identity partition columns. */
public boolean isUnPartitioned() {
    // isEmpty() is the idiomatic form of size() == 0.
    return getPartitionColumns().isEmpty();
}
public List<String> getPartitionColumnNames() {
return getPartitionColumns().stream().filter(java.util.Objects::nonNull).map(Column::getName)
.collect(Collectors.toList());
}
@Override
public String getTableIdentifier() {
return Joiner.on(":").join(name, ((BaseTable) getNativeTable()).operations().current().uuid());
}
public IcebergCatalogType getCatalogType() {
return IcebergCatalogType.valueOf(icebergProperties.get(ICEBERG_CATALOG_TYPE));
}
public String getTableLocation() {
return getNativeTable().location();
}
public org.apache.iceberg.Table getNativeTable() {
if (nativeTable == null) {
IcebergTable resourceMappingTable = (IcebergTable) GlobalStateMgr.getCurrentState().getMetadataMgr()
.getTable(getCatalogName(), remoteDbName, remoteTableName);
if (resourceMappingTable == null) {
throw new StarRocksConnectorException("Can't find table %s.%s.%s",
getCatalogName(), remoteDbName, remoteTableName);
}
nativeTable = resourceMappingTable.getNativeTable();
}
return nativeTable;
}
public long getRefreshSnapshotTime() {
return refreshSnapshotTime;
}
public void setRefreshSnapshotTime(long refreshSnapshotTime) {
this.refreshSnapshotTime = refreshSnapshotTime;
}
/**
 * Converts this table to its Thrift descriptor for the execution engine.
 * The given partition list must be non-null (it is currently unused beyond
 * the null check).
 */
@Override
public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) {
    Preconditions.checkNotNull(partitions);
    TIcebergTable tIcebergTable = new TIcebergTable();
    List<TColumn> tColumns = Lists.newArrayList();
    for (Column column : getBaseSchema()) {
        tColumns.add(column.toThrift());
    }
    tIcebergTable.setColumns(tColumns);
    // Fixed: use getNativeTable() instead of the raw field — the field may
    // still be null for resource-mapping tables and is lazily resolved by
    // the getter (as the other methods of this class already do).
    tIcebergTable.setIceberg_schema(IcebergApiConverter.getTIcebergSchema(getNativeTable().schema()));
    tIcebergTable.setPartition_column_names(getPartitionColumnNames());
    TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.ICEBERG_TABLE,
            fullSchema.size(), 0, remoteTableName, remoteDbName);
    tTableDescriptor.setIcebergTable(tIcebergTable);
    return tTableDescriptor;
}
/**
 * Serializes this table: base Table fields first, then a JSON payload with
 * the remote db/table names, the optional resource name, and any Iceberg
 * properties. The resource-name key is written only when non-empty, so
 * readers must treat it as optional.
 */
@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    JsonObject jsonObject = new JsonObject();
    jsonObject.addProperty(JSON_KEY_ICEBERG_DB, remoteDbName);
    jsonObject.addProperty(JSON_KEY_ICEBERG_TABLE, remoteTableName);
    if (!Strings.isNullOrEmpty(resourceName)) {
        jsonObject.addProperty(JSON_KEY_RESOURCE_NAME, resourceName);
    }
    if (!icebergProperties.isEmpty()) {
        JsonObject jIcebergProperties = new JsonObject();
        for (Map.Entry<String, String> entry : icebergProperties.entrySet()) {
            jIcebergProperties.addProperty(entry.getKey(), entry.getValue());
        }
        jsonObject.add(JSON_KEY_ICEBERG_PROPERTIES, jIcebergProperties);
    }
    Text.writeString(out, jsonObject.toString());
}
/**
 * Deserializes this table from the counterpart of {@link #write}.
 */
@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    String json = Text.readString(in);
    JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject();
    remoteDbName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_DB).getAsString();
    remoteTableName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_TABLE).getAsString();
    // Fixed: write() serializes the resource name only when it is non-empty,
    // so the key may be absent; reading it unconditionally caused an NPE
    // (getAsJsonPrimitive returns null for a missing member).
    if (jsonObject.has(JSON_KEY_RESOURCE_NAME)) {
        resourceName = jsonObject.getAsJsonPrimitive(JSON_KEY_RESOURCE_NAME).getAsString();
    }
    if (jsonObject.has(JSON_KEY_ICEBERG_PROPERTIES)) {
        JsonObject jIcebergProperties = jsonObject.getAsJsonObject(JSON_KEY_ICEBERG_PROPERTIES);
        for (Map.Entry<String, JsonElement> entry : jIcebergProperties.entrySet()) {
            icebergProperties.put(entry.getKey(), entry.getValue().getAsString());
        }
    }
}
@Override
public boolean isSupported() {
return true;
}
@Override
public boolean supportInsert() {
return getNativeTable().properties().getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT)
.equalsIgnoreCase(PARQUET_FORMAT);
}
@Override
public int hashCode() {
return com.google.common.base.Objects.hashCode(getCatalogName(), remoteDbName, getTableIdentifier());
}
@Override
public boolean equals(Object other) {
if (!(other instanceof IcebergTable)) {
return false;
}
IcebergTable otherTable = (IcebergTable) other;
String catalogName = getCatalogName();
String tableIdentifier = getTableIdentifier();
return Objects.equal(catalogName, otherTable.getCatalogName()) &&
Objects.equal(remoteDbName, otherTable.remoteDbName) &&
Objects.equal(tableIdentifier, otherTable.getTableIdentifier());
}
public static Builder builder() {
return new Builder();
}
/** Fluent builder for {@link IcebergTable}. */
public static class Builder {
    private long id;
    private String srTableName;
    private String catalogName;
    private String resourceName;
    private String remoteDbName;
    private String remoteTableName;
    private List<Column> fullSchema;
    private Map<String, String> icebergProperties;
    private org.apache.iceberg.Table nativeTable;
    // NOTE(review): snapshotId is collected by setSnapshotId() but never
    // used by build() — the IcebergTable constructor derives the cached
    // snapshot id itself. Confirm whether this field is intentional.
    private long snapshotId;
    public Builder() {
    }
    public Builder setId(long id) {
        this.id = id;
        return this;
    }
    public Builder setSrTableName(String srTableName) {
        this.srTableName = srTableName;
        return this;
    }
    public Builder setCatalogName(String catalogName) {
        this.catalogName = catalogName;
        return this;
    }
    public Builder setResourceName(String resourceName) {
        this.resourceName = resourceName;
        return this;
    }
    public Builder setRemoteDbName(String remoteDbName) {
        this.remoteDbName = remoteDbName;
        return this;
    }
    public Builder setRemoteTableName(String remoteTableName) {
        this.remoteTableName = remoteTableName;
        return this;
    }
    public Builder setFullSchema(List<Column> fullSchema) {
        this.fullSchema = fullSchema;
        return this;
    }
    public Builder setIcebergProperties(Map<String, String> icebergProperties) {
        this.icebergProperties = icebergProperties;
        return this;
    }
    public Builder setNativeTable(org.apache.iceberg.Table nativeTable) {
        this.nativeTable = nativeTable;
        return this;
    }
    public Builder setSnapshotId(long snapshotId) {
        this.snapshotId = snapshotId;
        return this;
    }
    public IcebergTable build() {
        return new IcebergTable(id, srTableName, catalogName, resourceName, remoteDbName, remoteTableName,
                fullSchema, nativeTable, icebergProperties);
    }
}
private void writeLock() {
lock.writeLock().lock();
}
private void writeUnlock() {
lock.writeLock().unlock();
}
private void readLock() {
lock.readLock().lock();
}
private void readUnlock() {
lock.readLock().unlock();
}
} | class IcebergTable extends Table {
private static final Logger LOG = LogManager.getLogger(IcebergTable.class);
private static final String JSON_KEY_ICEBERG_DB = "database";
private static final String JSON_KEY_ICEBERG_TABLE = "table";
private static final String JSON_KEY_RESOURCE_NAME = "resource";
private static final String JSON_KEY_ICEBERG_PROPERTIES = "icebergProperties";
private static final String PARQUET_FORMAT = "parquet";
private String catalogName;
@SerializedName(value = "dn")
private String remoteDbName;
@SerializedName(value = "tn")
private String remoteTableName;
@SerializedName(value = "rn")
private String resourceName;
@SerializedName(value = "prop")
private Map<String, String> icebergProperties = Maps.newHashMap();
private org.apache.iceberg.Table nativeTable;
private List<Column> partitionColumns;
private Optional<Snapshot> snapshot = Optional.empty();
private long refreshSnapshotTime = -1L;
private long cachedSnapshotId = -1;
private List<String> cachedPartitionNames = Lists.newArrayList();
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
public IcebergTable() {
super(TableType.ICEBERG);
}
public IcebergTable(long id, String srTableName, String catalogName, String resourceName, String remoteDbName,
String remoteTableName, List<Column> schema, org.apache.iceberg.Table nativeTable,
Map<String, String> icebergProperties) {
super(id, srTableName, TableType.ICEBERG, schema);
this.catalogName = catalogName;
this.resourceName = resourceName;
this.remoteDbName = remoteDbName;
this.remoteTableName = remoteTableName;
this.nativeTable = nativeTable;
this.icebergProperties = icebergProperties;
Optional<Snapshot> snapshot = getSnapshot();
this.cachedSnapshotId = snapshot.map(Snapshot::snapshotId).orElse(-1L);
}
public String getCatalogName() {
return catalogName == null ? getResourceMappingCatalogName(resourceName, "iceberg") : catalogName;
}
public String getResourceName() {
return resourceName;
}
public String getRemoteDbName() {
return remoteDbName;
}
public String getRemoteTableName() {
return remoteTableName;
}
/**
 * Returns the table's current snapshot, lazily resolving and caching it on
 * the first call. Empty when the underlying table has no current snapshot.
 */
public Optional<Snapshot> getSnapshot() {
    if (!snapshot.isPresent()) {
        snapshot = Optional.ofNullable(getNativeTable().currentSnapshot());
    }
    return snapshot;
}
public long getCachedSnapshotId() {
return cachedSnapshotId;
}
public List<String> getCachedPartitionNames() {
readLock();
try {
return cachedPartitionNames;
} finally {
readUnlock();
}
}
@Override
public String getUUID() {
if (CatalogMgr.isExternalCatalog(catalogName)) {
return String.join(".", catalogName, remoteDbName, remoteTableName,
((BaseTable) getNativeTable()).operations().current().uuid());
} else {
return Long.toString(id);
}
}
public List<Column> getPartitionColumns() {
if (partitionColumns == null) {
List<PartitionField> identityPartitionFields = this.getNativeTable().spec().fields().stream().
filter(partitionField -> partitionField.transform().isIdentity()).collect(Collectors.toList());
partitionColumns = identityPartitionFields.stream().map(partitionField -> getColumn(partitionField.name()))
.collect(Collectors.toList());
}
return partitionColumns;
}
public List<Integer> partitionColumnIndexes() {
List<Column> partitionCols = getPartitionColumns();
return partitionCols.stream().map(col -> fullSchema.indexOf(col)).collect(Collectors.toList());
}
public List<Integer> getSortKeyIndexes() {
List<Integer> indexes = new ArrayList<>();
org.apache.iceberg.Table nativeTable = getNativeTable();
List<Types.NestedField> fields = nativeTable.schema().asStruct().fields();
List<Integer> sortFieldSourceIds = nativeTable.sortOrder().fields().stream()
.map(SortField::sourceId)
.collect(Collectors.toList());
for (int i = 0; i < fields.size(); i++) {
Types.NestedField field = fields.get(i);
if (sortFieldSourceIds.contains(field.fieldId())) {
indexes.add(i);
}
}
return indexes;
}
/** Returns true when the table has no identity partition columns. */
public boolean isUnPartitioned() {
    // isEmpty() is the idiomatic form of size() == 0.
    return getPartitionColumns().isEmpty();
}
public List<String> getPartitionColumnNames() {
return getPartitionColumns().stream().filter(java.util.Objects::nonNull).map(Column::getName)
.collect(Collectors.toList());
}
@Override
public String getTableIdentifier() {
return Joiner.on(":").join(name, ((BaseTable) getNativeTable()).operations().current().uuid());
}
public IcebergCatalogType getCatalogType() {
return IcebergCatalogType.valueOf(icebergProperties.get(ICEBERG_CATALOG_TYPE));
}
public String getTableLocation() {
return getNativeTable().location();
}
public org.apache.iceberg.Table getNativeTable() {
if (nativeTable == null) {
IcebergTable resourceMappingTable = (IcebergTable) GlobalStateMgr.getCurrentState().getMetadataMgr()
.getTable(getCatalogName(), remoteDbName, remoteTableName);
if (resourceMappingTable == null) {
throw new StarRocksConnectorException("Can't find table %s.%s.%s",
getCatalogName(), remoteDbName, remoteTableName);
}
nativeTable = resourceMappingTable.getNativeTable();
}
return nativeTable;
}
public long getRefreshSnapshotTime() {
return refreshSnapshotTime;
}
public void setRefreshSnapshotTime(long refreshSnapshotTime) {
this.refreshSnapshotTime = refreshSnapshotTime;
}
/**
 * Converts this table to its Thrift descriptor for the execution engine.
 * The given partition list must be non-null (it is currently unused beyond
 * the null check).
 */
@Override
public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) {
    Preconditions.checkNotNull(partitions);
    TIcebergTable tIcebergTable = new TIcebergTable();
    List<TColumn> tColumns = Lists.newArrayList();
    for (Column column : getBaseSchema()) {
        tColumns.add(column.toThrift());
    }
    tIcebergTable.setColumns(tColumns);
    // Fixed: use getNativeTable() instead of the raw field — the field may
    // still be null for resource-mapping tables and is lazily resolved by
    // the getter (as the other methods of this class already do).
    tIcebergTable.setIceberg_schema(IcebergApiConverter.getTIcebergSchema(getNativeTable().schema()));
    tIcebergTable.setPartition_column_names(getPartitionColumnNames());
    TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.ICEBERG_TABLE,
            fullSchema.size(), 0, remoteTableName, remoteDbName);
    tTableDescriptor.setIcebergTable(tIcebergTable);
    return tTableDescriptor;
}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
JsonObject jsonObject = new JsonObject();
jsonObject.addProperty(JSON_KEY_ICEBERG_DB, remoteDbName);
jsonObject.addProperty(JSON_KEY_ICEBERG_TABLE, remoteTableName);
if (!Strings.isNullOrEmpty(resourceName)) {
jsonObject.addProperty(JSON_KEY_RESOURCE_NAME, resourceName);
}
if (!icebergProperties.isEmpty()) {
JsonObject jIcebergProperties = new JsonObject();
for (Map.Entry<String, String> entry : icebergProperties.entrySet()) {
jIcebergProperties.addProperty(entry.getKey(), entry.getValue());
}
jsonObject.add(JSON_KEY_ICEBERG_PROPERTIES, jIcebergProperties);
}
Text.writeString(out, jsonObject.toString());
}
/**
 * Deserializes this table from the counterpart of {@link #write}.
 */
@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);
    String json = Text.readString(in);
    JsonObject jsonObject = JsonParser.parseString(json).getAsJsonObject();
    remoteDbName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_DB).getAsString();
    remoteTableName = jsonObject.getAsJsonPrimitive(JSON_KEY_ICEBERG_TABLE).getAsString();
    // Fixed: write() serializes the resource name only when it is non-empty,
    // so the key may be absent; reading it unconditionally caused an NPE
    // (getAsJsonPrimitive returns null for a missing member).
    if (jsonObject.has(JSON_KEY_RESOURCE_NAME)) {
        resourceName = jsonObject.getAsJsonPrimitive(JSON_KEY_RESOURCE_NAME).getAsString();
    }
    if (jsonObject.has(JSON_KEY_ICEBERG_PROPERTIES)) {
        JsonObject jIcebergProperties = jsonObject.getAsJsonObject(JSON_KEY_ICEBERG_PROPERTIES);
        for (Map.Entry<String, JsonElement> entry : jIcebergProperties.entrySet()) {
            icebergProperties.put(entry.getKey(), entry.getValue().getAsString());
        }
    }
}
@Override
public boolean isSupported() {
return true;
}
@Override
public boolean supportInsert() {
return getNativeTable().properties().getOrDefault(DEFAULT_FILE_FORMAT, DEFAULT_FILE_FORMAT_DEFAULT)
.equalsIgnoreCase(PARQUET_FORMAT);
}
@Override
public int hashCode() {
return com.google.common.base.Objects.hashCode(getCatalogName(), remoteDbName, getTableIdentifier());
}
@Override
public boolean equals(Object other) {
if (!(other instanceof IcebergTable)) {
return false;
}
IcebergTable otherTable = (IcebergTable) other;
String catalogName = getCatalogName();
String tableIdentifier = getTableIdentifier();
return Objects.equal(catalogName, otherTable.getCatalogName()) &&
Objects.equal(remoteDbName, otherTable.remoteDbName) &&
Objects.equal(tableIdentifier, otherTable.getTableIdentifier());
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private long id;
private String srTableName;
private String catalogName;
private String resourceName;
private String remoteDbName;
private String remoteTableName;
private List<Column> fullSchema;
private Map<String, String> icebergProperties;
private org.apache.iceberg.Table nativeTable;
private long snapshotId;
public Builder() {
}
public Builder setId(long id) {
this.id = id;
return this;
}
public Builder setSrTableName(String srTableName) {
this.srTableName = srTableName;
return this;
}
public Builder setCatalogName(String catalogName) {
this.catalogName = catalogName;
return this;
}
public Builder setResourceName(String resourceName) {
this.resourceName = resourceName;
return this;
}
public Builder setRemoteDbName(String remoteDbName) {
this.remoteDbName = remoteDbName;
return this;
}
public Builder setRemoteTableName(String remoteTableName) {
this.remoteTableName = remoteTableName;
return this;
}
public Builder setFullSchema(List<Column> fullSchema) {
this.fullSchema = fullSchema;
return this;
}
public Builder setIcebergProperties(Map<String, String> icebergProperties) {
this.icebergProperties = icebergProperties;
return this;
}
public Builder setNativeTable(org.apache.iceberg.Table nativeTable) {
this.nativeTable = nativeTable;
return this;
}
public Builder setSnapshotId(long snapshotId) {
this.snapshotId = snapshotId;
return this;
}
public IcebergTable build() {
return new IcebergTable(id, srTableName, catalogName, resourceName, remoteDbName, remoteTableName,
fullSchema, nativeTable, icebergProperties);
}
}
private void writeLock() {
lock.writeLock().lock();
}
private void writeUnlock() {
lock.writeLock().unlock();
}
private void readLock() {
lock.readLock().lock();
}
private void readUnlock() {
lock.readLock().unlock();
}
} |
When `enableBucketShuffleJoin = false`, we should never do bucket shuffle at all. So when `isEnableBucketShuffleJoin = false`, the function `isBucketShuffleDownGrade` should always return true.
boolean isBucketShuffleDownGrade = ConnectContext.get().getSessionVariable().isEnableBucketShuffleJoin();
if (!isBucketShuffleDownGrade) {
return false;
} else if (otherSideSpec.getShuffleType() != ShuffleType.EXECUTION_BUCKETED
|| !(oneSidePlan instanceof GroupPlan)) {
return false;
} else {
PhysicalOlapScan candidate = findDownGradeBucketShuffleCandidate((GroupPlan) oneSidePlan);
if (candidate == null || candidate.getTable() == null
|| candidate.getTable().getDefaultDistributionInfo() == null) {
return false;
} else {
int prunedPartNum = candidate.getSelectedPartitionIds().size();
int bucketNum = candidate.getTable().getDefaultDistributionInfo().getBucketNum();
int totalBucketNum = prunedPartNum * bucketNum;
int backEndNum = Math.max(1, ConnectContext.get().getEnv().getClusterInfo()
.getBackendsNumber(true));
int paraNum = Math.max(1, ConnectContext.get().getSessionVariable().getParallelExecInstanceNum());
int totalParaNum = Math.min(10, backEndNum * paraNum);
return totalBucketNum < totalParaNum;
}
}
} | boolean isBucketShuffleDownGrade = ConnectContext.get().getSessionVariable().isEnableBucketShuffleJoin(); | private boolean isBucketShuffleDownGrade(Plan oneSidePlan, DistributionSpecHash otherSideSpec) {
boolean isEnableBucketShuffleJoin = ConnectContext.get().getSessionVariable().isEnableBucketShuffleJoin();
if (!isEnableBucketShuffleJoin) {
return true;
} else if (otherSideSpec.getShuffleType() != ShuffleType.EXECUTION_BUCKETED
|| !(oneSidePlan instanceof GroupPlan)) {
return false;
} else {
PhysicalOlapScan candidate = findDownGradeBucketShuffleCandidate((GroupPlan) oneSidePlan);
if (candidate == null || candidate.getTable() == null
|| candidate.getTable().getDefaultDistributionInfo() == null) {
return false;
} else {
int prunedPartNum = candidate.getSelectedPartitionIds().size();
int bucketNum = candidate.getTable().getDefaultDistributionInfo().getBucketNum();
int totalBucketNum = prunedPartNum * bucketNum;
int backEndNum = Math.max(1, ConnectContext.get().getEnv().getClusterInfo()
.getBackendsNumber(true));
int paraNum = Math.max(1, ConnectContext.get().getSessionVariable().getParallelExecInstanceNum());
int totalParaNum = Math.min(10, backEndNum * paraNum);
return totalBucketNum < totalParaNum;
}
}
} | class ChildrenPropertiesRegulator extends PlanVisitor<Boolean, Void> {
private final GroupExpression parent;
private final List<GroupExpression> children;
private final List<PhysicalProperties> childrenProperties;
private final List<PhysicalProperties> requiredProperties;
private final JobContext jobContext;
/**
 * @param parent the group expression whose children are being regulated
 * @param children the chosen child group expressions
 * @param childrenProperties the physical properties delivered by each child
 * @param requiredProperties the properties the parent requires of each child
 * @param jobContext optimizer job context
 */
public ChildrenPropertiesRegulator(GroupExpression parent, List<GroupExpression> children,
        List<PhysicalProperties> childrenProperties, List<PhysicalProperties> requiredProperties,
        JobContext jobContext) {
    this.parent = parent;
    this.children = children;
    this.childrenProperties = childrenProperties;
    this.requiredProperties = requiredProperties;
    this.jobContext = jobContext;
}
/**
 * Adjusts the children's physical properties so they satisfy the parent
 * operator, dispatching to the operator-specific visit method.
 *
 * @return true if the (possibly adjusted) children are acceptable for this
 *         parent; false if this parent/children combination should be
 *         rejected. (The previous "@return enforce cost." doc was stale —
 *         the method returns a boolean, not a cost.)
 */
public boolean adjustChildrenProperties() {
    return parent.getPlan().accept(this, null);
}
/**
 * Default handling for all operators: any child whose delivered
 * distribution is "must shuffle" gets an exchange enforced on it.
 * Always accepts the children (returns true).
 */
@Override
public Boolean visit(Plan plan, Void context) {
    for (int i = 0; i < children.size(); i++) {
        DistributionSpec distributionSpec = childrenProperties.get(i).getDistributionSpec();
        if (distributionSpec instanceof DistributionSpecMustShuffle) {
            updateChildEnforceAndCost(i, PhysicalProperties.EXECUTION_ANY);
        }
    }
    return true;
}
@Override
public Boolean visitPhysicalHashAggregate(PhysicalHashAggregate<? extends Plan> agg, Void context) {
if (agg.getGroupByExpressions().isEmpty() && agg.getOutputExpressions().isEmpty()) {
return false;
}
if (!agg.getAggregateParam().canBeBanned) {
return true;
}
if (agg.getAggMode() == AggMode.INPUT_TO_RESULT && children.get(0).getPlan() instanceof PhysicalDistribute) {
return false;
}
if (agg.getAggMode() == AggMode.INPUT_TO_BUFFER
&& requiredProperties.get(0).getDistributionSpec() instanceof DistributionSpecHash
&& children.get(0).getPlan() instanceof PhysicalDistribute) {
return false;
}
if (agg.getAggMode() == AggMode.INPUT_TO_RESULT
&& children.get(0).getPlan() instanceof PhysicalUnion
&& !((PhysicalUnion) children.get(0).getPlan()).isDistinct()) {
return false;
}
if (agg.getAggMode() == AggMode.INPUT_TO_BUFFER || agg.getAggMode() == AggMode.INPUT_TO_RESULT) {
List<MultiDistinction> multiDistinctions = agg.getOutputExpressions().stream()
.filter(Alias.class::isInstance)
.map(a -> ((Alias) a).child())
.filter(AggregateExpression.class::isInstance)
.map(a -> ((AggregateExpression) a).getFunction())
.filter(MultiDistinction.class::isInstance)
.map(MultiDistinction.class::cast)
.collect(Collectors.toList());
if (multiDistinctions.size() == 1) {
Expression distinctChild = multiDistinctions.get(0).child(0);
DistributionSpec childDistribution = childrenProperties.get(0).getDistributionSpec();
if (distinctChild instanceof SlotReference && childDistribution instanceof DistributionSpecHash) {
SlotReference slotReference = (SlotReference) distinctChild;
DistributionSpecHash distributionSpecHash = (DistributionSpecHash) childDistribution;
List<ExprId> groupByColumns = agg.getGroupByExpressions().stream()
.map(SlotReference.class::cast)
.map(SlotReference::getExprId)
.collect(Collectors.toList());
DistributionSpecHash groupByRequire = new DistributionSpecHash(
groupByColumns, ShuffleType.REQUIRE);
List<ExprId> distinctChildColumns = Lists.newArrayList(slotReference.getExprId());
distinctChildColumns.add(slotReference.getExprId());
DistributionSpecHash distinctChildRequire = new DistributionSpecHash(
distinctChildColumns, ShuffleType.REQUIRE);
if ((!groupByColumns.isEmpty() && distributionSpecHash.satisfy(groupByRequire))
|| (groupByColumns.isEmpty() && distributionSpecHash.satisfy(distinctChildRequire))) {
return false;
}
}
if (agg.getOutputExpressions().size() == 1 && agg.getGroupByExpressions().isEmpty()) {
return false;
}
}
}
visit(agg, context);
return true;
}
@Override
public Boolean visitPhysicalPartitionTopN(PhysicalPartitionTopN<? extends Plan> partitionTopN, Void context) {
if (partitionTopN.getPhase().isOnePhaseGlobal() && children.get(0).getPlan() instanceof PhysicalDistribute) {
return false;
} else if (partitionTopN.getPhase().isTwoPhaseGlobal()
&& !(children.get(0).getPlan() instanceof PhysicalDistribute)) {
return false;
} else {
visit(partitionTopN, context);
return true;
}
}
@Override
public Boolean visitPhysicalFilter(PhysicalFilter<? extends Plan> filter, Void context) {
return true;
}
private PhysicalOlapScan findDownGradeBucketShuffleCandidate(GroupPlan groupPlan) {
if (groupPlan == null || groupPlan.getGroup() == null
|| groupPlan.getGroup().getPhysicalExpressions().isEmpty()) {
return null;
} else {
Plan targetPlan = groupPlan.getGroup().getPhysicalExpressions().get(0).getPlan();
while (targetPlan != null
&& (targetPlan instanceof PhysicalProject || targetPlan instanceof PhysicalFilter)
&& !((GroupPlan) targetPlan.child(0)).getGroup().getPhysicalExpressions().isEmpty()) {
targetPlan = ((GroupPlan) targetPlan.child(0)).getGroup()
.getPhysicalExpressions().get(0).getPlan();
}
if (targetPlan == null || !(targetPlan instanceof PhysicalOlapScan)) {
return null;
} else {
return (PhysicalOlapScan) targetPlan;
}
}
}
private boolean couldNotRightBucketShuffleJoin(JoinType joinType, DistributionSpecHash leftHashSpec,
DistributionSpecHash rightHashSpec) {
boolean isJoinTypeInScope = (joinType == JoinType.RIGHT_ANTI_JOIN
|| joinType == JoinType.RIGHT_OUTER_JOIN
|| joinType == JoinType.FULL_OUTER_JOIN);
boolean isSpecInScope = (leftHashSpec.getShuffleType() == ShuffleType.NATURAL
|| rightHashSpec.getShuffleType() == ShuffleType.NATURAL);
return isJoinTypeInScope && isSpecInScope;
}
@Override
public Boolean visitPhysicalHashJoin(PhysicalHashJoin<? extends Plan, ? extends Plan> hashJoin,
Void context) {
Preconditions.checkArgument(children.size() == 2, "children.size() != 2");
Preconditions.checkArgument(childrenProperties.size() == 2);
Preconditions.checkArgument(requiredProperties.size() == 2);
visit(hashJoin, context);
DistributionSpec leftDistributionSpec = childrenProperties.get(0).getDistributionSpec();
DistributionSpec rightDistributionSpec = childrenProperties.get(1).getDistributionSpec();
if (rightDistributionSpec instanceof DistributionSpecReplicated) {
return true;
}
if (!(leftDistributionSpec instanceof DistributionSpecHash)
|| !(rightDistributionSpec instanceof DistributionSpecHash)) {
throw new RuntimeException("should not come here, two children of shuffle join should all be shuffle");
}
Plan leftChild = hashJoin.child(0);
Plan rightChild = hashJoin.child(1);
DistributionSpecHash leftHashSpec = (DistributionSpecHash) leftDistributionSpec;
DistributionSpecHash rightHashSpec = (DistributionSpecHash) rightDistributionSpec;
Optional<PhysicalProperties> updatedForLeft = Optional.empty();
Optional<PhysicalProperties> updatedForRight = Optional.empty();
if (JoinUtils.couldColocateJoin(leftHashSpec, rightHashSpec)) {
return true;
} else if (couldNotRightBucketShuffleJoin(hashJoin.getJoinType(), leftHashSpec, rightHashSpec)) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (isBucketShuffleDownGrade(leftChild, rightHashSpec)) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (isBucketShuffleDownGrade(rightChild, leftHashSpec)) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, rightHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if ((leftHashSpec.getShuffleType() == ShuffleType.NATURAL
&& rightHashSpec.getShuffleType() == ShuffleType.NATURAL)) {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (leftHashSpec.getShuffleType() == ShuffleType.NATURAL
&& rightHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED) {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (leftHashSpec.getShuffleType() == ShuffleType.NATURAL
&& rightHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED) {
if (bothSideShuffleKeysAreSameOrder(leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec())) {
return true;
}
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (leftHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.NATURAL) {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (leftHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED) {
if (bothSideShuffleKeysAreSameOrder(rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec())) {
return true;
}
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if ((leftHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED)) {
if (children.get(0).getPlan() instanceof PhysicalDistribute) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
} else {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
}
} else if ((leftHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.NATURAL)) {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if ((leftHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED)) {
if (children.get(0).getPlan() instanceof PhysicalDistribute) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
} else {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
}
} else if ((leftHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED)) {
if (bothSideShuffleKeysAreSameOrder(rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec())) {
return true;
}
if (children.get(0).getPlan() instanceof PhysicalDistribute) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
} else {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
}
}
updatedForLeft.ifPresent(physicalProperties -> updateChildEnforceAndCost(0, physicalProperties));
updatedForRight.ifPresent(physicalProperties -> updateChildEnforceAndCost(1, physicalProperties));
return true;
}
@Override
public Boolean visitPhysicalNestedLoopJoin(PhysicalNestedLoopJoin<? extends Plan, ? extends Plan> nestedLoopJoin,
Void context) {
Preconditions.checkArgument(children.size() == 2, String.format("children.size() is %d", children.size()));
Preconditions.checkArgument(childrenProperties.size() == 2);
Preconditions.checkArgument(requiredProperties.size() == 2);
visit(nestedLoopJoin, context);
DistributionSpec rightDistributionSpec = childrenProperties.get(1).getDistributionSpec();
if (rightDistributionSpec instanceof DistributionSpecStorageGather) {
updateChildEnforceAndCost(1, PhysicalProperties.GATHER);
}
return true;
}
@Override
public Boolean visitPhysicalProject(PhysicalProject<? extends Plan> project, Void context) {
return true;
}
@Override
public Boolean visitPhysicalSetOperation(PhysicalSetOperation setOperation, Void context) {
visit(setOperation, context);
if (children.isEmpty()) {
return true;
}
PhysicalProperties requiredProperty = requiredProperties.get(0);
DistributionSpec requiredDistributionSpec = requiredProperty.getDistributionSpec();
if (requiredDistributionSpec instanceof DistributionSpecGather) {
for (int i = 0; i < childrenProperties.size(); i++) {
if (childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecStorageGather) {
updateChildEnforceAndCost(i, PhysicalProperties.GATHER);
}
}
} else if (requiredDistributionSpec instanceof DistributionSpecAny) {
for (int i = 0; i < childrenProperties.size(); i++) {
if (childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecStorageAny
|| childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecStorageGather
|| childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecGather
|| (childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecHash
&& ((DistributionSpecHash) childrenProperties.get(i).getDistributionSpec())
.getShuffleType() == ShuffleType.NATURAL)) {
updateChildEnforceAndCost(i, PhysicalProperties.EXECUTION_ANY);
}
}
} else if (requiredDistributionSpec instanceof DistributionSpecHash) {
DistributionSpecHash basic = (DistributionSpecHash) requiredDistributionSpec;
for (int i = 0; i < childrenProperties.size(); i++) {
DistributionSpecHash current = (DistributionSpecHash) childrenProperties.get(i).getDistributionSpec();
if (current.getShuffleType() != ShuffleType.EXECUTION_BUCKETED
|| !bothSideShuffleKeysAreSameOrder(basic, current,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(i).getDistributionSpec())) {
PhysicalProperties target = calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, basic, current,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(i).getDistributionSpec());
updateChildEnforceAndCost(i, target);
}
}
}
return true;
}
@Override
public Boolean visitAbstractPhysicalSort(AbstractPhysicalSort<? extends Plan> sort, Void context) {
visit(sort, context);
if (sort.getSortPhase() == SortPhase.GATHER_SORT && sort.child() instanceof PhysicalDistribute) {
return false;
}
return true;
}
/**
* check both side real output hash key order are same or not.
*
* @param notShuffleSideOutput not shuffle side real output used hash spec
* @param shuffleSideOutput shuffle side real output used hash spec
* @param notShuffleSideRequired not shuffle side required used hash spec
* @param shuffleSideRequired shuffle side required hash spec
* @return true if same
*/
private boolean bothSideShuffleKeysAreSameOrder(
DistributionSpecHash notShuffleSideOutput, DistributionSpecHash shuffleSideOutput,
DistributionSpecHash notShuffleSideRequired, DistributionSpecHash shuffleSideRequired) {
List<ExprId> shuffleSideOutputList = shuffleSideOutput.getOrderedShuffledColumns();
List<ExprId> notShuffleSideOutputList = calAnotherSideRequiredShuffleIds(notShuffleSideOutput,
notShuffleSideRequired, shuffleSideRequired);
if (shuffleSideOutputList.size() != notShuffleSideOutputList.size()) {
return false;
} else if (shuffleSideOutputList.equals(notShuffleSideOutputList)) {
return true;
} else {
boolean isSatisfy = true;
for (int i = 0; i < shuffleSideOutputList.size() && isSatisfy; i++) {
ExprId shuffleSideExprId = shuffleSideOutputList.get(i);
ExprId notShuffleSideExprId = notShuffleSideOutputList.get(i);
if (!(shuffleSideExprId.equals(notShuffleSideExprId)
|| shuffleSideOutput.getEquivalenceExprIdsOf(shuffleSideExprId)
.contains(notShuffleSideExprId))) {
isSatisfy = false;
}
}
return isSatisfy;
}
}
/**
* calculate the shuffle side hash key right orders.
* For example,
* if not shuffle side real hash key is 1 2 3.
* the requirement of hash key of not shuffle side is 3 2 1.
* the requirement of hash key of shuffle side is 6 5 4.
* then we should let the shuffle side real output hash key order as 4 5 6
*
* @param notShuffleSideOutput not shuffle side real output used hash spec
* @param notShuffleSideRequired not shuffle side required used hash spec
* @param shuffleSideRequired shuffle side required hash spec
* @return shuffle side real output used hash key order
*/
private List<ExprId> calAnotherSideRequiredShuffleIds(DistributionSpecHash notShuffleSideOutput,
DistributionSpecHash notShuffleSideRequired, DistributionSpecHash shuffleSideRequired) {
ImmutableList.Builder<ExprId> rightShuffleIds = ImmutableList.builder();
for (ExprId scanId : notShuffleSideOutput.getOrderedShuffledColumns()) {
int index = notShuffleSideRequired.getOrderedShuffledColumns().indexOf(scanId);
if (index == -1) {
Set<ExprId> equivalentExprIds = notShuffleSideOutput.getEquivalenceExprIdsOf(scanId);
for (ExprId alternativeExpr : equivalentExprIds) {
index = notShuffleSideRequired.getOrderedShuffledColumns().indexOf(alternativeExpr);
if (index != -1) {
break;
}
}
}
Preconditions.checkState(index != -1, "index could not be -1");
rightShuffleIds.add(shuffleSideRequired.getOrderedShuffledColumns().get(index));
}
return rightShuffleIds.build();
}
/**
* generate shuffle side real output should follow PhysicalProperties. More info could see
* calAnotherSideRequiredShuffleIds's comment.
*
* @param shuffleType real output shuffle type
* @param notShuffleSideOutput not shuffle side real output used hash spec
* @param shuffleSideOutput shuffle side real output used hash spec
* @param notShuffleSideRequired not shuffle side required used hash spec
* @param shuffleSideRequired shuffle side required hash spec
* @return shuffle side new required hash spec
*/
private PhysicalProperties calAnotherSideRequired(ShuffleType shuffleType,
DistributionSpecHash notShuffleSideOutput, DistributionSpecHash shuffleSideOutput,
DistributionSpecHash notShuffleSideRequired, DistributionSpecHash shuffleSideRequired) {
List<ExprId> shuffleSideIds = calAnotherSideRequiredShuffleIds(notShuffleSideOutput,
notShuffleSideRequired, shuffleSideRequired);
return new PhysicalProperties(new DistributionSpecHash(shuffleSideIds, shuffleType,
shuffleSideOutput.getTableId(), shuffleSideOutput.getSelectedIndexId(),
shuffleSideOutput.getPartitionIds()));
}
private void updateChildEnforceAndCost(int index, PhysicalProperties targetProperties) {
GroupExpression child = children.get(index);
Pair<Cost, List<PhysicalProperties>> lowest = child.getLowestCostTable().get(childrenProperties.get(index));
PhysicalProperties output = child.getOutputProperties(childrenProperties.get(index));
DistributionSpec target = targetProperties.getDistributionSpec();
updateChildEnforceAndCost(child, output, target, lowest.first);
childrenProperties.set(index, targetProperties);
}
private void updateChildEnforceAndCost(GroupExpression child, PhysicalProperties childOutput,
DistributionSpec target, Cost currentCost) {
if (child.getPlan() instanceof PhysicalDistribute) {
childOutput = child.getInputPropertiesList(childOutput).get(0);
Pair<Cost, GroupExpression> newChildAndCost = child.getOwnerGroup().getLowestCostPlan(childOutput).get();
child = newChildAndCost.second;
currentCost = newChildAndCost.first;
}
PhysicalProperties newOutputProperty = new PhysicalProperties(target);
GroupExpression enforcer = target.addEnforcer(child.getOwnerGroup());
child.getOwnerGroup().addEnforcer(enforcer);
ConnectContext connectContext = jobContext.getCascadesContext().getConnectContext();
Cost totalCost = CostCalculator.addChildCost(connectContext, enforcer.getPlan(),
CostCalculator.calculateCost(connectContext, enforcer, Lists.newArrayList(childOutput)),
currentCost,
0);
if (enforcer.updateLowestCostTable(newOutputProperty,
Lists.newArrayList(childOutput), totalCost)) {
enforcer.putOutputPropertiesMap(newOutputProperty, newOutputProperty);
}
child.getOwnerGroup().setBestPlan(enforcer, totalCost, newOutputProperty);
}
} | class ChildrenPropertiesRegulator extends PlanVisitor<Boolean, Void> {
private final GroupExpression parent;
private final List<GroupExpression> children;
private final List<PhysicalProperties> childrenProperties;
private final List<PhysicalProperties> requiredProperties;
private final JobContext jobContext;
public ChildrenPropertiesRegulator(GroupExpression parent, List<GroupExpression> children,
List<PhysicalProperties> childrenProperties, List<PhysicalProperties> requiredProperties,
JobContext jobContext) {
this.parent = parent;
this.children = children;
this.childrenProperties = childrenProperties;
this.requiredProperties = requiredProperties;
this.jobContext = jobContext;
}
/**
* adjust children properties
*
* @return enforce cost.
*/
public boolean adjustChildrenProperties() {
return parent.getPlan().accept(this, null);
}
@Override
public Boolean visit(Plan plan, Void context) {
for (int i = 0; i < children.size(); i++) {
DistributionSpec distributionSpec = childrenProperties.get(i).getDistributionSpec();
if (distributionSpec instanceof DistributionSpecMustShuffle) {
updateChildEnforceAndCost(i, PhysicalProperties.EXECUTION_ANY);
}
}
return true;
}
@Override
public Boolean visitPhysicalHashAggregate(PhysicalHashAggregate<? extends Plan> agg, Void context) {
if (agg.getGroupByExpressions().isEmpty() && agg.getOutputExpressions().isEmpty()) {
return false;
}
if (!agg.getAggregateParam().canBeBanned) {
return true;
}
if (agg.getAggMode() == AggMode.INPUT_TO_RESULT && children.get(0).getPlan() instanceof PhysicalDistribute) {
return false;
}
if (agg.getAggMode() == AggMode.INPUT_TO_BUFFER
&& requiredProperties.get(0).getDistributionSpec() instanceof DistributionSpecHash
&& children.get(0).getPlan() instanceof PhysicalDistribute) {
return false;
}
if (agg.getAggMode() == AggMode.INPUT_TO_RESULT
&& children.get(0).getPlan() instanceof PhysicalUnion
&& !((PhysicalUnion) children.get(0).getPlan()).isDistinct()) {
return false;
}
if (agg.getAggMode() == AggMode.INPUT_TO_BUFFER || agg.getAggMode() == AggMode.INPUT_TO_RESULT) {
List<MultiDistinction> multiDistinctions = agg.getOutputExpressions().stream()
.filter(Alias.class::isInstance)
.map(a -> ((Alias) a).child())
.filter(AggregateExpression.class::isInstance)
.map(a -> ((AggregateExpression) a).getFunction())
.filter(MultiDistinction.class::isInstance)
.map(MultiDistinction.class::cast)
.collect(Collectors.toList());
if (multiDistinctions.size() == 1) {
Expression distinctChild = multiDistinctions.get(0).child(0);
DistributionSpec childDistribution = childrenProperties.get(0).getDistributionSpec();
if (distinctChild instanceof SlotReference && childDistribution instanceof DistributionSpecHash) {
SlotReference slotReference = (SlotReference) distinctChild;
DistributionSpecHash distributionSpecHash = (DistributionSpecHash) childDistribution;
List<ExprId> groupByColumns = agg.getGroupByExpressions().stream()
.map(SlotReference.class::cast)
.map(SlotReference::getExprId)
.collect(Collectors.toList());
DistributionSpecHash groupByRequire = new DistributionSpecHash(
groupByColumns, ShuffleType.REQUIRE);
List<ExprId> distinctChildColumns = Lists.newArrayList(slotReference.getExprId());
distinctChildColumns.add(slotReference.getExprId());
DistributionSpecHash distinctChildRequire = new DistributionSpecHash(
distinctChildColumns, ShuffleType.REQUIRE);
if ((!groupByColumns.isEmpty() && distributionSpecHash.satisfy(groupByRequire))
|| (groupByColumns.isEmpty() && distributionSpecHash.satisfy(distinctChildRequire))) {
return false;
}
}
if (agg.getOutputExpressions().size() == 1 && agg.getGroupByExpressions().isEmpty()) {
return false;
}
}
}
visit(agg, context);
return true;
}
@Override
public Boolean visitPhysicalPartitionTopN(PhysicalPartitionTopN<? extends Plan> partitionTopN, Void context) {
if (partitionTopN.getPhase().isOnePhaseGlobal() && children.get(0).getPlan() instanceof PhysicalDistribute) {
return false;
} else if (partitionTopN.getPhase().isTwoPhaseGlobal()
&& !(children.get(0).getPlan() instanceof PhysicalDistribute)) {
return false;
} else {
visit(partitionTopN, context);
return true;
}
}
@Override
public Boolean visitPhysicalFilter(PhysicalFilter<? extends Plan> filter, Void context) {
return true;
}
private PhysicalOlapScan findDownGradeBucketShuffleCandidate(GroupPlan groupPlan) {
if (groupPlan == null || groupPlan.getGroup() == null
|| groupPlan.getGroup().getPhysicalExpressions().isEmpty()) {
return null;
} else {
Plan targetPlan = groupPlan.getGroup().getPhysicalExpressions().get(0).getPlan();
while (targetPlan != null
&& (targetPlan instanceof PhysicalProject || targetPlan instanceof PhysicalFilter)
&& !((GroupPlan) targetPlan.child(0)).getGroup().getPhysicalExpressions().isEmpty()) {
targetPlan = ((GroupPlan) targetPlan.child(0)).getGroup()
.getPhysicalExpressions().get(0).getPlan();
}
if (targetPlan == null || !(targetPlan instanceof PhysicalOlapScan)) {
return null;
} else {
return (PhysicalOlapScan) targetPlan;
}
}
}
private boolean couldNotRightBucketShuffleJoin(JoinType joinType, DistributionSpecHash leftHashSpec,
DistributionSpecHash rightHashSpec) {
boolean isJoinTypeInScope = (joinType == JoinType.RIGHT_ANTI_JOIN
|| joinType == JoinType.RIGHT_OUTER_JOIN
|| joinType == JoinType.FULL_OUTER_JOIN);
boolean isSpecInScope = (leftHashSpec.getShuffleType() == ShuffleType.NATURAL
|| rightHashSpec.getShuffleType() == ShuffleType.NATURAL);
return isJoinTypeInScope && isSpecInScope && !SessionVariable.canUseNereidsDistributePlanner();
}
@Override
public Boolean visitPhysicalHashJoin(
PhysicalHashJoin<? extends Plan, ? extends Plan> hashJoin, Void context) {
Preconditions.checkArgument(children.size() == 2, "children.size() != 2");
Preconditions.checkArgument(childrenProperties.size() == 2);
Preconditions.checkArgument(requiredProperties.size() == 2);
visit(hashJoin, context);
DistributionSpec leftDistributionSpec = childrenProperties.get(0).getDistributionSpec();
DistributionSpec rightDistributionSpec = childrenProperties.get(1).getDistributionSpec();
if (rightDistributionSpec instanceof DistributionSpecReplicated) {
return true;
}
if (!(leftDistributionSpec instanceof DistributionSpecHash)
|| !(rightDistributionSpec instanceof DistributionSpecHash)) {
throw new RuntimeException("should not come here, two children of shuffle join should all be shuffle");
}
Plan leftChild = hashJoin.child(0);
Plan rightChild = hashJoin.child(1);
DistributionSpecHash leftHashSpec = (DistributionSpecHash) leftDistributionSpec;
DistributionSpecHash rightHashSpec = (DistributionSpecHash) rightDistributionSpec;
Optional<PhysicalProperties> updatedForLeft = Optional.empty();
Optional<PhysicalProperties> updatedForRight = Optional.empty();
if (JoinUtils.couldColocateJoin(leftHashSpec, rightHashSpec)) {
return true;
} else if (couldNotRightBucketShuffleJoin(hashJoin.getJoinType(), leftHashSpec, rightHashSpec)) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (isBucketShuffleDownGrade(leftChild, rightHashSpec)) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (isBucketShuffleDownGrade(rightChild, leftHashSpec)) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, rightHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if ((leftHashSpec.getShuffleType() == ShuffleType.NATURAL
&& rightHashSpec.getShuffleType() == ShuffleType.NATURAL)) {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (leftHashSpec.getShuffleType() == ShuffleType.NATURAL
&& rightHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED) {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (leftHashSpec.getShuffleType() == ShuffleType.NATURAL
&& rightHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED) {
if (bothSideShuffleKeysAreSameOrder(leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec())) {
return true;
}
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if (leftHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.NATURAL) {
if (SessionVariable.canUseNereidsDistributePlanner()) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
} else {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
}
} else if (leftHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED) {
if (bothSideShuffleKeysAreSameOrder(rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec())) {
return true;
}
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if ((leftHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED)) {
if (children.get(0).getPlan() instanceof PhysicalDistribute) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
} else {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
}
} else if ((leftHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.NATURAL)) {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
} else if ((leftHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.EXECUTION_BUCKETED)) {
if (children.get(0).getPlan() instanceof PhysicalDistribute) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
} else {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
}
} else if ((leftHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED
&& rightHashSpec.getShuffleType() == ShuffleType.STORAGE_BUCKETED)) {
if (bothSideShuffleKeysAreSameOrder(rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec())) {
return true;
}
if (children.get(0).getPlan() instanceof PhysicalDistribute) {
updatedForLeft = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, rightHashSpec, leftHashSpec,
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec()));
} else {
updatedForRight = Optional.of(calAnotherSideRequired(
ShuffleType.STORAGE_BUCKETED, leftHashSpec, rightHashSpec,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(1).getDistributionSpec()));
}
}
updatedForLeft.ifPresent(physicalProperties -> updateChildEnforceAndCost(0, physicalProperties));
updatedForRight.ifPresent(physicalProperties -> updateChildEnforceAndCost(1, physicalProperties));
return true;
}
@Override
public Boolean visitPhysicalNestedLoopJoin(PhysicalNestedLoopJoin<? extends Plan, ? extends Plan> nestedLoopJoin,
Void context) {
Preconditions.checkArgument(children.size() == 2, String.format("children.size() is %d", children.size()));
Preconditions.checkArgument(childrenProperties.size() == 2);
Preconditions.checkArgument(requiredProperties.size() == 2);
visit(nestedLoopJoin, context);
DistributionSpec rightDistributionSpec = childrenProperties.get(1).getDistributionSpec();
if (rightDistributionSpec instanceof DistributionSpecStorageGather) {
updateChildEnforceAndCost(1, PhysicalProperties.GATHER);
}
return true;
}
@Override
public Boolean visitPhysicalProject(PhysicalProject<? extends Plan> project, Void context) {
return true;
}
@Override
public Boolean visitPhysicalSetOperation(PhysicalSetOperation setOperation, Void context) {
visit(setOperation, context);
if (children.isEmpty()) {
return true;
}
PhysicalProperties requiredProperty = requiredProperties.get(0);
DistributionSpec requiredDistributionSpec = requiredProperty.getDistributionSpec();
if (requiredDistributionSpec instanceof DistributionSpecGather) {
for (int i = 0; i < childrenProperties.size(); i++) {
if (childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecStorageGather) {
updateChildEnforceAndCost(i, PhysicalProperties.GATHER);
}
}
} else if (requiredDistributionSpec instanceof DistributionSpecAny) {
for (int i = 0; i < childrenProperties.size(); i++) {
if (childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecStorageAny
|| childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecStorageGather
|| childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecGather
|| (childrenProperties.get(i).getDistributionSpec() instanceof DistributionSpecHash
&& ((DistributionSpecHash) childrenProperties.get(i).getDistributionSpec())
.getShuffleType() == ShuffleType.NATURAL)) {
updateChildEnforceAndCost(i, PhysicalProperties.EXECUTION_ANY);
}
}
} else if (requiredDistributionSpec instanceof DistributionSpecHash) {
DistributionSpecHash basic = (DistributionSpecHash) requiredDistributionSpec;
for (int i = 0; i < childrenProperties.size(); i++) {
DistributionSpecHash current = (DistributionSpecHash) childrenProperties.get(i).getDistributionSpec();
if (current.getShuffleType() != ShuffleType.EXECUTION_BUCKETED
|| !bothSideShuffleKeysAreSameOrder(basic, current,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(i).getDistributionSpec())) {
PhysicalProperties target = calAnotherSideRequired(
ShuffleType.EXECUTION_BUCKETED, basic, current,
(DistributionSpecHash) requiredProperties.get(0).getDistributionSpec(),
(DistributionSpecHash) requiredProperties.get(i).getDistributionSpec());
updateChildEnforceAndCost(i, target);
}
}
}
return true;
}
@Override
public Boolean visitAbstractPhysicalSort(AbstractPhysicalSort<? extends Plan> sort, Void context) {
visit(sort, context);
if (sort.getSortPhase() == SortPhase.GATHER_SORT && sort.child() instanceof PhysicalDistribute) {
return false;
}
return true;
}
@Override
public Boolean visitPhysicalTopN(PhysicalTopN<? extends Plan> topN, Void context) {
visit(topN, context);
if (topN.getSortPhase() == SortPhase.LOCAL_SORT
&& childrenProperties.get(0).getDistributionSpec().equals(DistributionSpecGather.INSTANCE)) {
return false;
}
return true;
}
/**
* check both side real output hash key order are same or not.
*
* @param notShuffleSideOutput not shuffle side real output used hash spec
* @param shuffleSideOutput shuffle side real output used hash spec
* @param notShuffleSideRequired not shuffle side required used hash spec
* @param shuffleSideRequired shuffle side required hash spec
* @return true if same
*/
private boolean bothSideShuffleKeysAreSameOrder(
DistributionSpecHash notShuffleSideOutput, DistributionSpecHash shuffleSideOutput,
DistributionSpecHash notShuffleSideRequired, DistributionSpecHash shuffleSideRequired) {
List<ExprId> shuffleSideOutputList = shuffleSideOutput.getOrderedShuffledColumns();
List<ExprId> notShuffleSideOutputList = calAnotherSideRequiredShuffleIds(notShuffleSideOutput,
notShuffleSideRequired, shuffleSideRequired);
if (shuffleSideOutputList.size() != notShuffleSideOutputList.size()) {
return false;
} else if (shuffleSideOutputList.equals(notShuffleSideOutputList)) {
return true;
} else {
boolean isSatisfy = true;
for (int i = 0; i < shuffleSideOutputList.size() && isSatisfy; i++) {
ExprId shuffleSideExprId = shuffleSideOutputList.get(i);
ExprId notShuffleSideExprId = notShuffleSideOutputList.get(i);
if (!(shuffleSideExprId.equals(notShuffleSideExprId)
|| shuffleSideOutput.getEquivalenceExprIdsOf(shuffleSideExprId)
.contains(notShuffleSideExprId))) {
isSatisfy = false;
}
}
return isSatisfy;
}
}
/**
* calculate the shuffle side hash key right orders.
* For example,
* if not shuffle side real hash key is 1 2 3.
* the requirement of hash key of not shuffle side is 3 2 1.
* the requirement of hash key of shuffle side is 6 5 4.
* then we should let the shuffle side real output hash key order as 4 5 6
*
* @param notShuffleSideOutput not shuffle side real output used hash spec
* @param notShuffleSideRequired not shuffle side required used hash spec
* @param shuffleSideRequired shuffle side required hash spec
* @return shuffle side real output used hash key order
*/
private List<ExprId> calAnotherSideRequiredShuffleIds(DistributionSpecHash notShuffleSideOutput,
DistributionSpecHash notShuffleSideRequired, DistributionSpecHash shuffleSideRequired) {
ImmutableList.Builder<ExprId> rightShuffleIds = ImmutableList.builder();
for (ExprId scanId : notShuffleSideOutput.getOrderedShuffledColumns()) {
int index = notShuffleSideRequired.getOrderedShuffledColumns().indexOf(scanId);
if (index == -1) {
Set<ExprId> equivalentExprIds = notShuffleSideOutput.getEquivalenceExprIdsOf(scanId);
for (ExprId alternativeExpr : equivalentExprIds) {
index = notShuffleSideRequired.getOrderedShuffledColumns().indexOf(alternativeExpr);
if (index != -1) {
break;
}
}
}
Preconditions.checkState(index != -1, "index could not be -1");
rightShuffleIds.add(shuffleSideRequired.getOrderedShuffledColumns().get(index));
}
return rightShuffleIds.build();
}
/**
* generate shuffle side real output should follow PhysicalProperties. More info could see
* calAnotherSideRequiredShuffleIds's comment.
*
* @param shuffleType real output shuffle type
* @param notNeedShuffleSideOutput not shuffle side real output used hash spec
* @param needShuffleSideOutput shuffle side real output used hash spec
* @param notNeedShuffleSideRequired not shuffle side required used hash spec
* @param needShuffleSideRequired shuffle side required hash spec
* @return shuffle side new required hash spec
*/
private PhysicalProperties calAnotherSideRequired(ShuffleType shuffleType,
DistributionSpecHash notNeedShuffleSideOutput, DistributionSpecHash needShuffleSideOutput,
DistributionSpecHash notNeedShuffleSideRequired, DistributionSpecHash needShuffleSideRequired) {
List<ExprId> shuffleSideIds = calAnotherSideRequiredShuffleIds(notNeedShuffleSideOutput,
notNeedShuffleSideRequired, needShuffleSideRequired);
return new PhysicalProperties(new DistributionSpecHash(shuffleSideIds, shuffleType,
needShuffleSideOutput.getTableId(), needShuffleSideOutput.getSelectedIndexId(),
needShuffleSideOutput.getPartitionIds()));
}
private void updateChildEnforceAndCost(int index, PhysicalProperties targetProperties) {
GroupExpression child = children.get(index);
Pair<Cost, List<PhysicalProperties>> lowest = child.getLowestCostTable().get(childrenProperties.get(index));
PhysicalProperties output = child.getOutputProperties(childrenProperties.get(index));
DistributionSpec target = targetProperties.getDistributionSpec();
updateChildEnforceAndCost(child, output, target, lowest.first);
childrenProperties.set(index, targetProperties);
}
private void updateChildEnforceAndCost(GroupExpression child, PhysicalProperties childOutput,
DistributionSpec target, Cost currentCost) {
if (child.getPlan() instanceof PhysicalDistribute) {
childOutput = child.getInputPropertiesList(childOutput).get(0);
Pair<Cost, GroupExpression> newChildAndCost = child.getOwnerGroup().getLowestCostPlan(childOutput).get();
child = newChildAndCost.second;
currentCost = newChildAndCost.first;
}
PhysicalProperties newOutputProperty = new PhysicalProperties(target);
GroupExpression enforcer = target.addEnforcer(child.getOwnerGroup());
child.getOwnerGroup().addEnforcer(enforcer);
ConnectContext connectContext = jobContext.getCascadesContext().getConnectContext();
Cost totalCost = CostCalculator.addChildCost(connectContext, enforcer.getPlan(),
CostCalculator.calculateCost(connectContext, enforcer, Lists.newArrayList(childOutput)),
currentCost,
0);
if (enforcer.updateLowestCostTable(newOutputProperty,
Lists.newArrayList(childOutput), totalCost)) {
enforcer.putOutputPropertiesMap(newOutputProperty, newOutputProperty);
}
child.getOwnerGroup().setBestPlan(enforcer, totalCost, newOutputProperty);
}
} |
No, If the received exception message is not equal to those two strings it will anyway `throw exception;`. The problem was `exception.getMessage();` sometimes may be null, and then couldn't use `equals` method on `null`. | public DecimalValue(String value) {
if (isHexValueString(value)) {
this.value = hexToDecimalFloatingPointNumber(value);
} else {
try {
this.value = new BigDecimal(value, MathContext.DECIMAL128);
} catch (NumberFormatException exception) {
String message = exception.getMessage();
if ((message != null) && (message.equals("Too many nonzero exponent digits.") ||
message.equals("Exponent overflow."))) {
throw ErrorCreator.createError(BallerinaErrorReasons.LARGE_EXPONENT_ERROR,
BLangExceptionHelper.getErrorDetails(RuntimeErrors.LARGE_EXPONENTS_IN_DECIMAL, value));
}
throw exception;
}
}
if (!this.booleanValue()) {
this.valueKind = DecimalValueKind.ZERO;
}
} | if ((message != null) && (message.equals("Too many nonzero exponent digits.") || | public DecimalValue(String value) {
if (isHexValueString(value)) {
this.value = hexToDecimalFloatingPointNumber(value);
} else {
try {
this.value = new BigDecimal(value, MathContext.DECIMAL128);
} catch (NumberFormatException exception) {
String message = exception.getMessage();
if ((message != null) && (message.equals("Too many nonzero exponent digits.") ||
message.equals("Exponent overflow."))) {
throw ErrorCreator.createError(BallerinaErrorReasons.LARGE_EXPONENT_ERROR,
BLangExceptionHelper.getErrorDetails(RuntimeErrors.LARGE_EXPONENTS_IN_DECIMAL, value));
}
throw exception;
}
}
if (!this.booleanValue()) {
this.valueKind = DecimalValueKind.ZERO;
}
} | class DecimalValue implements SimpleValue, BDecimal {
private static final String INF_STRING = "Infinity";
private static final String NEG_INF_STRING = "-" + INF_STRING;
private static final String NAN = "NaN";
@Deprecated
public DecimalValueKind valueKind = DecimalValueKind.OTHER;
private final BigDecimal value;
public DecimalValue(BigDecimal value) {
this.value = value;
if (!this.booleanValue()) {
this.valueKind = DecimalValueKind.ZERO;
}
}
public DecimalValue(String value, DecimalValueKind valueKind) {
this(value);
this.valueKind = valueKind;
}
private static boolean isHexValueString(String value) {
String upperCaseValue = value.toUpperCase();
return upperCaseValue.startsWith("0X") || upperCaseValue.startsWith("-0X");
}
/**
* Method used to convert the hexadecimal number to decimal floating point number.
* BigDecimal does not support hexadecimal numbers. Hence, we need to convert the hexadecimal number to a
* decimal floating point number before passing the string value to the BigDecimal constructor.
*
* @param value Hexadecimal string value that needs to be converted.
* @return BigDecimal corresponds to the hexadecimal number provided.
*/
private static BigDecimal hexToDecimalFloatingPointNumber(String value) {
String upperCaseValue = value.toUpperCase();
String hexValue = upperCaseValue.replace("0X", "");
if (!hexValue.contains("P")) {
hexValue = hexValue.concat("P0");
}
String[] splitAtExponent = hexValue.split("P");
int binaryExponent = Integer.parseInt(splitAtExponent[1]);
String numberWithoutExp = splitAtExponent[0];
String intComponent;
if (numberWithoutExp.contains(".")) {
String[] numberComponents = numberWithoutExp.split("\\.");
intComponent = numberComponents[0];
String decimalComponent = numberComponents[1];
binaryExponent += 4 * (-1) * decimalComponent.length();
intComponent = intComponent.concat(decimalComponent);
} else {
intComponent = numberWithoutExp;
}
BigDecimal exponentValue;
if (binaryExponent >= 0) {
exponentValue = new BigDecimal(2).pow(binaryExponent);
} else {
exponentValue = BigDecimal.ONE.divide(new BigDecimal(2).pow(-binaryExponent), MathContext.DECIMAL128);
}
BigInteger hexEquivalentNumber = new BigInteger(intComponent, 16);
return new BigDecimal(hexEquivalentNumber).multiply(exponentValue, MathContext.DECIMAL128);
}
/**
* Get value of the decimal.
* @return the value
*/
public BigDecimal decimalValue() {
return this.value;
}
/**
* Get the int value of the decimal.
* May result in a {@code ErrorValue}
* @return the integer value
*/
public long intValue() {
if (!isDecimalWithinIntRange(this)) {
throw ErrorUtils.createNumericConversionError(this.stringValue(null), PredefinedTypes.TYPE_DECIMAL,
PredefinedTypes.TYPE_INT);
}
return (long) Math.rint(value.doubleValue());
}
/**
* Check the given value is in int range.
* @param decimalValue value to be checked
* @return true if the value is in int range
*/
public static boolean isDecimalWithinIntRange(DecimalValue decimalValue) {
BigDecimal value = decimalValue.value;
return value.compareTo(RuntimeConstants.BINT_MAX_VALUE_BIG_DECIMAL_RANGE_MAX) < 0 &&
value.compareTo(RuntimeConstants.BINT_MIN_VALUE_BIG_DECIMAL_RANGE_MIN) > 0;
}
/**
* Get the byte value.
* May result in a {@code ErrorValue}
* @return the byte value
*/
public int byteValue() {
int intVal = (int) Math.rint(this.value.doubleValue());
if (!isByteLiteral(intVal)) {
throw ErrorUtils.createNumericConversionError(value, PredefinedTypes.TYPE_DECIMAL,
PredefinedTypes.TYPE_BYTE);
}
return intVal;
}
private static boolean isByteLiteral(long longValue) {
return (longValue >= RuntimeConstants.BBYTE_MIN_VALUE && longValue <= RuntimeConstants.BBYTE_MAX_VALUE);
}
/**
* Get the float value.
* @return the double value
*/
public double floatValue() {
return value.doubleValue();
}
/**
* Check the given value represents true or false.
* @return true if the value is non zero
*/
public boolean booleanValue() {
return value.compareTo(BigDecimal.ZERO) != 0;
}
@Override
public Object copy(Map<Object, Object> refs) {
return this;
}
@Override
public Object frozenCopy(Map<Object, Object> refs) {
return this;
}
/**
* Get the string value.
* @return string value
* @param parent The link to the parent node
*/
public String stringValue(BLink parent) {
if (this.valueKind != DecimalValueKind.OTHER) {
return this.valueKind.getValue();
}
return value.toString();
}
/**
* Get the string value in expression style.
* @return string value in expression style
* @param parent The link to the parent node
*/
public String expressionStringValue(BLink parent) {
if (this.valueKind != DecimalValueKind.OTHER) {
return this.valueKind.getValue() + "d";
}
return value.toString() + "d";
}
/**
* Get the {@code BigDecimal} value.
* @return the decimal value
*/
public BigDecimal value() {
return this.value;
}
/**
* Get the {@code BType} of the value.
* @return the type
*/
public Type getType() {
return PredefinedTypes.TYPE_DECIMAL;
}
/**
* Returns a {decimal whose value is {@code (this + augend)}.
* @param augend value to be added.
* @return new value
*/
public DecimalValue add(DecimalValue augend) {
if (this.valueKind == DecimalValueKind.ZERO) {
return augend;
}
if (augend.valueKind == DecimalValueKind.ZERO) {
return this;
}
return new DecimalValue(this.decimalValue().add(augend.decimalValue(), MathContext.DECIMAL128));
}
/**
* Returns a decimal whose value is {@code (this - subtrahend)}.
* @param subtrahend value to be subtracted
* @return value after subtraction
*/
public DecimalValue subtract(DecimalValue subtrahend) {
if (this.valueKind == DecimalValueKind.ZERO) {
if (subtrahend.valueKind == DecimalValueKind.ZERO) {
return subtrahend;
}
return subtrahend.negate();
}
if (subtrahend.valueKind == DecimalValueKind.ZERO) {
return this;
}
return new DecimalValue(this.decimalValue().subtract(subtrahend.decimalValue(),
MathContext.DECIMAL128));
}
/**
* Returns a decimal whose value is <tt>(this ×
* multiplicand)</tt>.
* @param multiplicand value to be multiplied
* @return value after multiplication
*/
public DecimalValue multiply(DecimalValue multiplicand) {
if (this.valueKind == DecimalValueKind.ZERO) {
return this;
}
if (multiplicand.valueKind == DecimalValueKind.OTHER) {
return new DecimalValue(this.decimalValue().multiply(multiplicand.decimalValue(),
MathContext.DECIMAL128));
}
return multiplicand;
}
/**
* Returns a decimal whose value is {@code (this /
* divisor)}.
* @param divisor value by which this decimal is to be divided
* @return value after division
*/
public DecimalValue divide(DecimalValue divisor) {
if (this.valueKind == DecimalValueKind.ZERO) {
if (divisor.valueKind == DecimalValueKind.ZERO) {
throw ErrorUtils.createInvalidDecimalError(NAN);
}
return this;
}
if (divisor.valueKind == DecimalValueKind.OTHER) {
return new DecimalValue(this.decimalValue().divide(divisor.decimalValue(), MathContext.DECIMAL128));
}
if (this.decimalValue().compareTo(BigDecimal.ZERO) > 0) {
throw ErrorUtils.createInvalidDecimalError(INF_STRING);
} else {
throw ErrorUtils.createInvalidDecimalError(NEG_INF_STRING);
}
}
/**
* Returns a decimal whose value is {@code (this %
* divisor)}.
* @param divisor value by which this decimal is to be divided
* @return {@code this % divisor}
*/
public DecimalValue remainder(DecimalValue divisor) {
if (divisor.valueKind == DecimalValueKind.OTHER) {
return new DecimalValue(this.decimalValue().remainder(divisor.decimalValue(),
MathContext.DECIMAL128));
}
throw ErrorUtils.createInvalidDecimalError(NAN);
}
/**
* Returns a decimal whose value is {@code (-this)}.
* @return {@code -this}
*/
public DecimalValue negate() {
if (this.valueKind == DecimalValueKind.OTHER) {
return new DecimalValue(this.decimalValue().negate());
}
return this;
}
@Override
public BDecimal add(BDecimal augend) {
return add((DecimalValue) augend);
}
@Override
public BDecimal subtract(BDecimal subtrahend) {
return subtract((DecimalValue) subtrahend);
}
@Override
public BDecimal multiply(BDecimal multiplicand) {
return multiply((DecimalValue) multiplicand);
}
@Override
public BDecimal divide(BDecimal divisor) {
return divide((DecimalValue) divisor);
}
@Override
public BDecimal remainder(BDecimal divisor) {
return remainder((DecimalValue) divisor);
}
/**
* Returns value kind of {@code (-this)}.
* @return value kind
*/
public DecimalValueKind getValueKind() {
return valueKind;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
DecimalValue bDecimal = (DecimalValue) obj;
return ((value.compareTo(bDecimal.value) == 0) && (this.valueKind == bDecimal.valueKind));
}
@Override
public int hashCode() {
return value.hashCode();
}
/**
* Get the string value.
* @return string value
*/
@Override
public String toString() {
return this.stringValue(null);
}
/**
* Returns decimal of given int value.
* @param value integer value
* @return decimal value
*/
public static DecimalValue valueOf(int value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
/**
* Returns decimal of given long value.
* @param value long value
* @return decimal value
*/
public static DecimalValue valueOf(long value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
/**
* Returns decimal of given double value.
* @param value double value
* @return decimal value
*/
public static DecimalValue valueOf(double value) {
if (Double.isNaN(value)) {
throw ErrorUtils.createInvalidDecimalError(NAN);
}
if (value == Double.POSITIVE_INFINITY) {
throw ErrorUtils.createInvalidDecimalError(INF_STRING);
}
if (value == Double.NEGATIVE_INFINITY) {
throw ErrorUtils.createInvalidDecimalError(NEG_INF_STRING);
}
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128));
}
/**
* Returns decimal of given boolean value.
* @param value boolean value
* @return decimal value
*/
public static DecimalValue valueOf(boolean value) {
return new DecimalValue(value ? BigDecimal.ONE.setScale(1, BigDecimal.ROUND_HALF_EVEN) :
BigDecimal.ZERO.setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
public static DecimalValue valueOfJ(byte value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
public static DecimalValue valueOfJ(char value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
public static DecimalValue valueOfJ(short value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
public static DecimalValue valueOfJ(int value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
public static DecimalValue valueOfJ(long value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
public static DecimalValue valueOfJ(float value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
public static DecimalValue valueOfJ(double value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
public static DecimalValue valueOfJ(BigDecimal value) {
return new DecimalValue(new BigDecimal(value.toString(), MathContext.DECIMAL128)
.setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
} | class DecimalValue implements SimpleValue, BDecimal {
private static final String INF_STRING = "Infinity";
private static final String NEG_INF_STRING = "-" + INF_STRING;
private static final String NAN = "NaN";
@Deprecated
public DecimalValueKind valueKind = DecimalValueKind.OTHER;
private final BigDecimal value;
public DecimalValue(BigDecimal value) {
this.value = value;
if (!this.booleanValue()) {
this.valueKind = DecimalValueKind.ZERO;
}
}
public DecimalValue(String value, DecimalValueKind valueKind) {
this(value);
this.valueKind = valueKind;
}
private static boolean isHexValueString(String value) {
String upperCaseValue = value.toUpperCase();
return upperCaseValue.startsWith("0X") || upperCaseValue.startsWith("-0X");
}
/**
* Method used to convert the hexadecimal number to decimal floating point number.
* BigDecimal does not support hexadecimal numbers. Hence, we need to convert the hexadecimal number to a
* decimal floating point number before passing the string value to the BigDecimal constructor.
*
* @param value Hexadecimal string value that needs to be converted.
* @return BigDecimal corresponds to the hexadecimal number provided.
*/
private static BigDecimal hexToDecimalFloatingPointNumber(String value) {
String upperCaseValue = value.toUpperCase();
String hexValue = upperCaseValue.replace("0X", "");
if (!hexValue.contains("P")) {
hexValue = hexValue.concat("P0");
}
String[] splitAtExponent = hexValue.split("P");
int binaryExponent = Integer.parseInt(splitAtExponent[1]);
String numberWithoutExp = splitAtExponent[0];
String intComponent;
if (numberWithoutExp.contains(".")) {
String[] numberComponents = numberWithoutExp.split("\\.");
intComponent = numberComponents[0];
String decimalComponent = numberComponents[1];
binaryExponent += 4 * (-1) * decimalComponent.length();
intComponent = intComponent.concat(decimalComponent);
} else {
intComponent = numberWithoutExp;
}
BigDecimal exponentValue;
if (binaryExponent >= 0) {
exponentValue = new BigDecimal(2).pow(binaryExponent);
} else {
exponentValue = BigDecimal.ONE.divide(new BigDecimal(2).pow(-binaryExponent), MathContext.DECIMAL128);
}
BigInteger hexEquivalentNumber = new BigInteger(intComponent, 16);
return new BigDecimal(hexEquivalentNumber).multiply(exponentValue, MathContext.DECIMAL128);
}
/**
* Get value of the decimal.
* @return the value
*/
public BigDecimal decimalValue() {
return this.value;
}
/**
* Get the int value of the decimal.
* May result in a {@code ErrorValue}
* @return the integer value
*/
public long intValue() {
if (!isDecimalWithinIntRange(this)) {
throw ErrorUtils.createNumericConversionError(this.stringValue(null), PredefinedTypes.TYPE_DECIMAL,
PredefinedTypes.TYPE_INT);
}
return (long) Math.rint(value.doubleValue());
}
/**
* Check the given value is in int range.
* @param decimalValue value to be checked
* @return true if the value is in int range
*/
public static boolean isDecimalWithinIntRange(DecimalValue decimalValue) {
BigDecimal value = decimalValue.value;
return value.compareTo(RuntimeConstants.BINT_MAX_VALUE_BIG_DECIMAL_RANGE_MAX) < 0 &&
value.compareTo(RuntimeConstants.BINT_MIN_VALUE_BIG_DECIMAL_RANGE_MIN) > 0;
}
/**
* Get the byte value.
* May result in a {@code ErrorValue}
* @return the byte value
*/
public int byteValue() {
int intVal = (int) Math.rint(this.value.doubleValue());
if (!isByteLiteral(intVal)) {
throw ErrorUtils.createNumericConversionError(value, PredefinedTypes.TYPE_DECIMAL,
PredefinedTypes.TYPE_BYTE);
}
return intVal;
}
private static boolean isByteLiteral(long longValue) {
return (longValue >= RuntimeConstants.BBYTE_MIN_VALUE && longValue <= RuntimeConstants.BBYTE_MAX_VALUE);
}
/**
* Get the float value.
* @return the double value
*/
public double floatValue() {
return value.doubleValue();
}
/**
* Check the given value represents true or false.
* @return true if the value is non zero
*/
public boolean booleanValue() {
return value.compareTo(BigDecimal.ZERO) != 0;
}
@Override
public Object copy(Map<Object, Object> refs) {
return this;
}
@Override
public Object frozenCopy(Map<Object, Object> refs) {
return this;
}
/**
* Get the string value.
* @return string value
* @param parent The link to the parent node
*/
public String stringValue(BLink parent) {
if (this.valueKind != DecimalValueKind.OTHER) {
return this.valueKind.getValue();
}
return value.toString();
}
/**
* Get the string value in expression style.
* @return string value in expression style
* @param parent The link to the parent node
*/
public String expressionStringValue(BLink parent) {
if (this.valueKind != DecimalValueKind.OTHER) {
return this.valueKind.getValue() + "d";
}
return value.toString() + "d";
}
/**
* Get the {@code BigDecimal} value.
* @return the decimal value
*/
public BigDecimal value() {
return this.value;
}
/**
* Get the {@code BType} of the value.
* @return the type
*/
public Type getType() {
return PredefinedTypes.TYPE_DECIMAL;
}
/**
* Returns a {decimal whose value is {@code (this + augend)}.
* @param augend value to be added.
* @return new value
*/
public DecimalValue add(DecimalValue augend) {
if (this.valueKind == DecimalValueKind.ZERO) {
return augend;
}
if (augend.valueKind == DecimalValueKind.ZERO) {
return this;
}
return new DecimalValue(this.decimalValue().add(augend.decimalValue(), MathContext.DECIMAL128));
}
/**
* Returns a decimal whose value is {@code (this - subtrahend)}.
* @param subtrahend value to be subtracted
* @return value after subtraction
*/
public DecimalValue subtract(DecimalValue subtrahend) {
if (this.valueKind == DecimalValueKind.ZERO) {
if (subtrahend.valueKind == DecimalValueKind.ZERO) {
return subtrahend;
}
return subtrahend.negate();
}
if (subtrahend.valueKind == DecimalValueKind.ZERO) {
return this;
}
return new DecimalValue(this.decimalValue().subtract(subtrahend.decimalValue(),
MathContext.DECIMAL128));
}
/**
* Returns a decimal whose value is <tt>(this ×
* multiplicand)</tt>.
* @param multiplicand value to be multiplied
* @return value after multiplication
*/
public DecimalValue multiply(DecimalValue multiplicand) {
if (this.valueKind == DecimalValueKind.ZERO) {
return this;
}
if (multiplicand.valueKind == DecimalValueKind.OTHER) {
return new DecimalValue(this.decimalValue().multiply(multiplicand.decimalValue(),
MathContext.DECIMAL128));
}
return multiplicand;
}
/**
* Returns a decimal whose value is {@code (this /
* divisor)}.
* @param divisor value by which this decimal is to be divided
* @return value after division
*/
public DecimalValue divide(DecimalValue divisor) {
if (this.valueKind == DecimalValueKind.ZERO) {
if (divisor.valueKind == DecimalValueKind.ZERO) {
throw ErrorUtils.createInvalidDecimalError(NAN);
}
return this;
}
if (divisor.valueKind == DecimalValueKind.OTHER) {
return new DecimalValue(this.decimalValue().divide(divisor.decimalValue(), MathContext.DECIMAL128));
}
if (this.decimalValue().compareTo(BigDecimal.ZERO) > 0) {
throw ErrorUtils.createInvalidDecimalError(INF_STRING);
} else {
throw ErrorUtils.createInvalidDecimalError(NEG_INF_STRING);
}
}
/**
* Returns a decimal whose value is {@code (this %
* divisor)}.
* @param divisor value by which this decimal is to be divided
* @return {@code this % divisor}
*/
public DecimalValue remainder(DecimalValue divisor) {
if (divisor.valueKind == DecimalValueKind.OTHER) {
return new DecimalValue(this.decimalValue().remainder(divisor.decimalValue(),
MathContext.DECIMAL128));
}
throw ErrorUtils.createInvalidDecimalError(NAN);
}
/**
* Returns a decimal whose value is {@code (-this)}.
* @return {@code -this}
*/
public DecimalValue negate() {
if (this.valueKind == DecimalValueKind.OTHER) {
return new DecimalValue(this.decimalValue().negate());
}
return this;
}
@Override
public BDecimal add(BDecimal augend) {
return add((DecimalValue) augend);
}
@Override
public BDecimal subtract(BDecimal subtrahend) {
return subtract((DecimalValue) subtrahend);
}
@Override
public BDecimal multiply(BDecimal multiplicand) {
return multiply((DecimalValue) multiplicand);
}
@Override
public BDecimal divide(BDecimal divisor) {
return divide((DecimalValue) divisor);
}
@Override
public BDecimal remainder(BDecimal divisor) {
return remainder((DecimalValue) divisor);
}
/**
* Returns value kind of {@code (-this)}.
* @return value kind
*/
public DecimalValueKind getValueKind() {
return valueKind;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
DecimalValue bDecimal = (DecimalValue) obj;
return ((value.compareTo(bDecimal.value) == 0) && (this.valueKind == bDecimal.valueKind));
}
@Override
public int hashCode() {
return value.hashCode();
}
/**
* Get the string value.
* @return string value
*/
@Override
public String toString() {
return this.stringValue(null);
}
/**
* Returns decimal of given int value.
* @param value integer value
* @return decimal value
*/
public static DecimalValue valueOf(int value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
/**
* Returns decimal of given long value.
* @param value long value
* @return decimal value
*/
public static DecimalValue valueOf(long value) {
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128).setScale(1, BigDecimal.ROUND_HALF_EVEN));
}
/**
* Returns decimal of given double value.
* @param value double value
* @return decimal value
*/
public static DecimalValue valueOf(double value) {
if (Double.isNaN(value)) {
throw ErrorUtils.createInvalidDecimalError(NAN);
}
if (value == Double.POSITIVE_INFINITY) {
throw ErrorUtils.createInvalidDecimalError(INF_STRING);
}
if (value == Double.NEGATIVE_INFINITY) {
throw ErrorUtils.createInvalidDecimalError(NEG_INF_STRING);
}
return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128));
}
/**
 * Returns a decimal representing the given boolean value: 1.0 for {@code true},
 * 0.0 for {@code false}.
 *
 * @param value boolean value
 * @return decimal value
 */
public static DecimalValue valueOf(boolean value) {
    // RoundingMode.HALF_EVEN replaces the deprecated BigDecimal.ROUND_HALF_EVEN constant.
    BigDecimal base = value ? BigDecimal.ONE : BigDecimal.ZERO;
    return new DecimalValue(base.setScale(1, java.math.RoundingMode.HALF_EVEN));
}
// The valueOfJ overloads box Java primitive values (and BigDecimal) into DecimalValue,
// scaled to one decimal place with banker's rounding. java.math.RoundingMode.HALF_EVEN
// replaces the deprecated BigDecimal.ROUND_HALF_EVEN int constant; behavior is unchanged.

public static DecimalValue valueOfJ(byte value) {
    return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128)
            .setScale(1, java.math.RoundingMode.HALF_EVEN));
}

public static DecimalValue valueOfJ(char value) {
    return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128)
            .setScale(1, java.math.RoundingMode.HALF_EVEN));
}

public static DecimalValue valueOfJ(short value) {
    return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128)
            .setScale(1, java.math.RoundingMode.HALF_EVEN));
}

public static DecimalValue valueOfJ(int value) {
    return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128)
            .setScale(1, java.math.RoundingMode.HALF_EVEN));
}

public static DecimalValue valueOfJ(long value) {
    return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128)
            .setScale(1, java.math.RoundingMode.HALF_EVEN));
}

public static DecimalValue valueOfJ(float value) {
    return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128)
            .setScale(1, java.math.RoundingMode.HALF_EVEN));
}

public static DecimalValue valueOfJ(double value) {
    return new DecimalValue(new BigDecimal(value, MathContext.DECIMAL128)
            .setScale(1, java.math.RoundingMode.HALF_EVEN));
}

public static DecimalValue valueOfJ(BigDecimal value) {
    // Round-trips through toString so DECIMAL128 rounding applies to the decimal
    // representation rather than the raw unscaled value.
    return new DecimalValue(new BigDecimal(value.toString(), MathContext.DECIMAL128)
            .setScale(1, java.math.RoundingMode.HALF_EVEN));
}
} |
Also move the de-dup logic to `visit(BlangTypedefinition)` | public void visit(BLangPackage pkgNode) {
if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) {
result = pkgNode;
return;
}
observabilityDesugar.addObserveInternalModuleImport(pkgNode);
observabilityDesugar.addObserveModuleImport(pkgNode);
code2CloudDesugar.addCode2CloudModuleImport(pkgNode);
createPackageInitFunctions(pkgNode, env);
addAttachedFunctionsToPackageLevel(pkgNode, env);
if (!pkgNode.testablePkgs.isEmpty() && pkgNode.getTestablePkg().getMockFunctionNamesMap() != null) {
mockDesugar.generateMockFunctions(pkgNode);
}
pkgNode.constants.stream()
.filter(constant -> constant.expr.getKind() == NodeKind.LITERAL ||
constant.expr.getKind() == NodeKind.NUMERIC_LITERAL)
.forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition));
BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env);
BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
for (BLangConstant constant : pkgNode.constants) {
if (constant.symbol.type.tag == TypeTags.MAP) {
BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol);
constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode,
pkgNode.initFunction.symbol.scope, env));
BLangInvocation frozenConstValExpr =
createLangLibInvocationNode(
"cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.type, constant.pos);
BLangAssignment constInit =
ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr);
initFnBody.stmts.add(constInit);
}
}
Map<String, String> map = new HashMap<String, String>();
for (int i = 0; i < pkgNode.constants.size(); i++) {
String nextKey = pkgNode.constants.get(i).toString();
String keyOwner = pkgNode.constants.get(i).symbol.owner.toString();
if (map.containsKey(nextKey)) {
if (map.get(nextKey) == keyOwner) {
pkgNode.constants.remove(i);
i -= 1;
} else {
map.put(nextKey, keyOwner);
}
} else {
map.put(nextKey, keyOwner);
}
}
pkgNode.globalVars = desugarGlobalVariables(pkgNode, initFnBody);
pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env));
annotationDesugar.rewritePackageAnnotations(pkgNode, env);
addUserDefinedModuleInitInvocationAndReturn(pkgNode);
pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env);
pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env);
pkgNode.constants = rewrite(pkgNode.constants, env);
pkgNode.globalVars = rewrite(pkgNode.globalVars, env);
desugarClassDefinitions(pkgNode.topLevelNodes);
serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction);
ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body);
addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body);
addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body);
pkgNode.initFunction = splitInitFunction(pkgNode, env);
pkgNode.initFunction = rewrite(pkgNode.initFunction, env);
pkgNode.startFunction = rewrite(pkgNode.startFunction, env);
pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env);
pkgNode.functions = rewrite(pkgNode.functions, env);
closureDesugar.visit(pkgNode);
for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) {
rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol));
}
pkgNode.completedPhases.add(CompilerPhase.DESUGAR);
initFuncIndex = 0;
result = pkgNode;
} | Map<String, String> map = new HashMap<String, String>(); | public void visit(BLangPackage pkgNode) {
// Skip packages that already went through the desugar phase.
if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) {
    result = pkgNode;
    return;
}
observabilityDesugar.addObserveInternalModuleImport(pkgNode);
observabilityDesugar.addObserveModuleImport(pkgNode);
code2CloudDesugar.addCode2CloudModuleImport(pkgNode);
createPackageInitFunctions(pkgNode, env);
addAttachedFunctionsToPackageLevel(pkgNode, env);
if (!pkgNode.testablePkgs.isEmpty() && pkgNode.getTestablePkg().getMockFunctionNamesMap() != null) {
    mockDesugar.generateMockFunctions(pkgNode);
}
// Constants initialized with (numeric) literals carry an associated type definition
// that must also be desugared.
pkgNode.constants.stream()
        .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL ||
                constant.expr.getKind() == NodeKind.NUMERIC_LITERAL)
        .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition));
BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env);
BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
// Map-typed constants get assigned a read-only clone inside the module init function.
for (BLangConstant constant : pkgNode.constants) {
    if (constant.symbol.type.tag == TypeTags.MAP) {
        BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol);
        constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode,
                pkgNode.initFunction.symbol.scope, env));
        BLangInvocation frozenConstValExpr =
                createLangLibInvocationNode(
                        "cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.getBType(),
                        constant.pos);
        BLangAssignment constInit =
                ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr);
        initFnBody.stmts.add(constInit);
    }
}
// Drop constants that are duplicated with the same owner module.
pkgNode.constants = removeDuplicateConstants(pkgNode);
pkgNode.globalVars = desugarGlobalVariables(pkgNode, initFnBody);
pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env));
annotationDesugar.rewritePackageAnnotations(pkgNode, env);
addUserDefinedModuleInitInvocationAndReturn(pkgNode);
// Rewrite type definitions in precedence order so dependencies are handled first.
pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env);
pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env);
pkgNode.constants = rewrite(pkgNode.constants, env);
pkgNode.globalVars = rewrite(pkgNode.globalVars, env);
desugarClassDefinitions(pkgNode.topLevelNodes);
serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction);
ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body);
addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body);
addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body);
// The init function may exceed method size limits; split before rewriting.
pkgNode.initFunction = splitInitFunction(pkgNode, env);
pkgNode.initFunction = rewrite(pkgNode.initFunction, env);
pkgNode.startFunction = rewrite(pkgNode.startFunction, env);
pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env);
pkgNode.functions = rewrite(pkgNode.functions, env);
closureDesugar.visit(pkgNode);
for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) {
    rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol));
}
pkgNode.completedPhases.add(CompilerPhase.DESUGAR);
initFuncIndex = 0;
result = pkgNode;
}
 * @param env The env for the type node
 * @return The generated initializer method
 */
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
    BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
    // No user-written init function: use the generated initializer as-is.
    if (classDefinition.initFunction == null) {
        return generatedInitFunc;
    }
    // Otherwise wire the generated initializer up to invoke the user-defined init.
    return wireUpGeneratedInitFunction(generatedInitFunc,
            (BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
}
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} |
Just to understand it: The fatal error handler is used when running the cluster but not utilized when initializing it, isn't it? I was just wondering whether we should use the `fatalErrorHandler` here. But looking through the code made me conclude that it's not necessary because failure during startup are handled in the [ClusterEntrypoint#startCluster:197](https://github.com/apache/flink/blob/6b4f8a4dce08b2852cfea9db8b5dffff0712aac6/flink-runtime/src/main/java/org/apache/flink/runtime/entrypoint/ClusterEntrypoint.java#L197) and handled properly in `ClusterEntrypoint.runClusterEntrypoint`. | private MultipleComponentLeaderElectionService getOrInitializeSingleLeaderElectionService() {
if (multipleComponentLeaderElectionService == null) {
try {
multipleComponentLeaderElectionService =
new DefaultMultipleComponentLeaderElectionService(
fatalErrorHandler,
"Single leader election service.",
new ZooKeeperMultipleComponentLeaderElectionDriverFactory(
leaderNamespacedCuratorFramework));
} catch (Exception e) {
throw new FlinkRuntimeException(
String.format(
"Could not initialize the %s",
DefaultMultipleComponentLeaderElectionService.class
.getSimpleName()),
e);
}
}
return multipleComponentLeaderElectionService;
} | throw new FlinkRuntimeException( | private MultipleComponentLeaderElectionService getOrInitializeSingleLeaderElectionService() {
// Guarded lazy initialization: multipleComponentLeaderElectionService is @GuardedBy("lock").
synchronized (lock) {
    if (multipleComponentLeaderElectionService == null) {
        try {
            multipleComponentLeaderElectionService =
                    new DefaultMultipleComponentLeaderElectionService(
                            fatalErrorHandler,
                            new ZooKeeperMultipleComponentLeaderElectionDriverFactory(
                                    leaderNamespacedCuratorFramework));
        } catch (Exception e) {
            // Wrap as unchecked; failures here surface during cluster startup.
            throw new FlinkRuntimeException(
                    String.format(
                            "Could not initialize the %s",
                            DefaultMultipleComponentLeaderElectionService.class
                                    .getSimpleName()),
                    e);
        }
    }
    return multipleComponentLeaderElectionService;
}
}
extends AbstractZooKeeperHaServices {
// Guards lazy creation and shutdown of the shared leader election service.
private final Object lock = new Object();
// Curator client scoped to the leader path namespace (set up in the constructor).
private final CuratorFramework leaderNamespacedCuratorFramework;
private final FatalErrorHandler fatalErrorHandler;
// Shared election service for all components; created lazily, closed in internalClose().
@Nullable
@GuardedBy("lock")
private MultipleComponentLeaderElectionService multipleComponentLeaderElectionService = null;

public ZooKeeperMultipleComponentLeaderElectionHaServices(
        CuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper,
        Configuration config,
        Executor ioExecutor,
        BlobStoreService blobStoreService,
        FatalErrorHandler fatalErrorHandler)
        throws Exception {
    super(curatorFrameworkWrapper, ioExecutor, config, blobStoreService);
    // Scope the curator client to the leader path and make sure the path exists.
    this.leaderNamespacedCuratorFramework =
            ZooKeeperUtils.useNamespaceAndEnsurePath(
                    getCuratorFramework(), ZooKeeperUtils.getLeaderPath());
    this.fatalErrorHandler = fatalErrorHandler;
}
@Override
protected LeaderElectionService createLeaderElectionService(String leaderName) {
    // Each component gets its own driver on top of the single shared election service.
    final MultipleComponentLeaderElectionService electionService;
    synchronized (lock) {
        electionService = getOrInitializeSingleLeaderElectionService();
    }
    return new DefaultLeaderElectionService(electionService.createDriverFactory(leaderName));
}
// Fix: removed a misplaced @GuardedBy("lock") annotation — this method reads only final
// fields and touches no state guarded by "lock".
@Override
protected LeaderRetrievalService createLeaderRetrievalService(String leaderPath) {
    return ZooKeeperUtils.createLeaderRetrievalService(
            leaderNamespacedCuratorFramework, leaderPath, configuration);
}
@Override
protected void internalClose() throws Exception {
    Exception exception = null;
    // Close and release the shared election service first (under lock), then the base
    // services; all failures are collected and rethrown at the end.
    synchronized (lock) {
        if (multipleComponentLeaderElectionService != null) {
            try {
                multipleComponentLeaderElectionService.close();
            } catch (Exception e) {
                exception = e;
            }
            multipleComponentLeaderElectionService = null;
        }
    }
    try {
        super.internalClose();
    } catch (Exception e) {
        // Keep the first failure as primary, attach later ones as suppressed.
        exception = ExceptionUtils.firstOrSuppressed(e, exception);
    }
    ExceptionUtils.tryRethrowException(exception);
}
// NOTE(review): redundant override — it only delegates to the super implementation and
// could be removed without any behavior change.
@Override
protected void internalCleanupJobData(JobID jobID) throws Exception {
    super.internalCleanupJobData(jobID);
}
// Per-component leader path names; JobManagers use the job id as their path.
@Override
protected String getLeaderPathForResourceManager() {
    return ZooKeeperUtils.getResourceManagerNode();
}
@Override
protected String getLeaderPathForDispatcher() {
    return ZooKeeperUtils.getDispatcherNode();
}
@Override
protected String getLeaderPathForJobManager(JobID jobID) {
    return jobID.toString();
}
@Override
protected String getLeaderPathForRestServer() {
    return ZooKeeperUtils.getRestServerNode();
}
} | class ZooKeeperMultipleComponentLeaderElectionHaServices
extends AbstractZooKeeperHaServices {
// Guards lazy creation and shutdown of the shared leader election service.
private final Object lock = new Object();
// Curator client scoped to the leader path namespace (set up in the constructor).
private final CuratorFramework leaderNamespacedCuratorFramework;
private final FatalErrorHandler fatalErrorHandler;
// Shared election service for all components; created lazily, closed in internalClose().
@Nullable
@GuardedBy("lock")
private MultipleComponentLeaderElectionService multipleComponentLeaderElectionService = null;

public ZooKeeperMultipleComponentLeaderElectionHaServices(
        CuratorFrameworkWithUnhandledErrorListener curatorFrameworkWrapper,
        Configuration config,
        Executor ioExecutor,
        BlobStoreService blobStoreService,
        FatalErrorHandler fatalErrorHandler)
        throws Exception {
    super(curatorFrameworkWrapper, ioExecutor, config, blobStoreService);
    // Scope the curator client to the leader path and make sure the path exists.
    this.leaderNamespacedCuratorFramework =
            ZooKeeperUtils.useNamespaceAndEnsurePath(
                    getCuratorFramework(), ZooKeeperUtils.getLeaderPath());
    this.fatalErrorHandler = fatalErrorHandler;
}

@Override
protected LeaderElectionService createLeaderElectionService(String leaderName) {
    // Each component gets its own driver on top of the single shared election service.
    return new DefaultLeaderElectionService(
            getOrInitializeSingleLeaderElectionService().createDriverFactory(leaderName));
}

@Override
protected LeaderRetrievalService createLeaderRetrievalService(String leaderPath) {
    return ZooKeeperUtils.createLeaderRetrievalService(
            leaderNamespacedCuratorFramework, leaderPath, configuration);
}

@Override
protected void internalClose() throws Exception {
    Exception exception = null;
    // Close the shared election service first (under lock), then the base services.
    synchronized (lock) {
        if (multipleComponentLeaderElectionService != null) {
            try {
                multipleComponentLeaderElectionService.close();
            } catch (Exception e) {
                exception = e;
            }
            multipleComponentLeaderElectionService = null;
        }
    }
    try {
        super.internalClose();
    } catch (Exception e) {
        // Keep the first failure as primary, attach later ones as suppressed.
        exception = ExceptionUtils.firstOrSuppressed(e, exception);
    }
    ExceptionUtils.tryRethrowException(exception);
}

// Per-component leader path names; JobManagers use the job id as their path.
@Override
protected String getLeaderPathForResourceManager() {
    return ZooKeeperUtils.getResourceManagerNode();
}
@Override
protected String getLeaderPathForDispatcher() {
    return ZooKeeperUtils.getDispatcherNode();
}
@Override
protected String getLeaderPathForJobManager(JobID jobID) {
    return jobID.toString();
}
@Override
protected String getLeaderPathForRestServer() {
    return ZooKeeperUtils.getRestServerNode();
}
} |
In general, I'm all for _not_ "hiding" versions in code, be it inline or constants. Even some of our explicit descriptors (e.g. codestarts IIRC) should be filtered by Maven instead. The only downside I see with build-parent is that you end up building the entire project, although only a (tiny) fraction of modules is affected. | public String getEffectiveBuilderImage() {
final String builderImageName = this.builderImage.toUpperCase();
if (builderImageName.equals(BuilderImageProvider.GRAALVM.name())) {
return "quay.io/quarkus/ubi-quarkus-native-image:21.3-java11";
} else if (builderImageName.equals(BuilderImageProvider.MANDREL.name())) {
return "quay.io/quarkus/ubi-quarkus-mandrel:21.3-java11";
} else {
return this.builderImage;
}
} | return "quay.io/quarkus/ubi-quarkus-mandrel:21.3-java11"; | public String getEffectiveBuilderImage() {
final String builderImageName = this.builderImage.toUpperCase();
if (builderImageName.equals(BuilderImageProvider.GRAALVM.name())) {
return DEFAULT_GRAALVM_BUILDER_IMAGE;
} else if (builderImageName.equals(BuilderImageProvider.MANDREL.name())) {
return DEFAULT_MANDREL_BUILDER_IMAGE;
} else {
return this.builderImage;
}
} | class NativeConfig {
/**
* Comma-separated, additional arguments to pass to the build process.
* If an argument includes the {@code ,} symbol, it needs to be escaped, e.g. {@code \\,}
*/
@ConfigItem
public Optional<List<String>> additionalBuildArgs;
/**
* If the HTTP url handler should be enabled, allowing you to do URL.openConnection() for HTTP URLs
*/
@ConfigItem(defaultValue = "true")
public boolean enableHttpUrlHandler;
/**
* If the HTTPS url handler should be enabled, allowing you to do URL.openConnection() for HTTPS URLs
*/
@ConfigItem
public boolean enableHttpsUrlHandler;
/**
* If all security services should be added to the native image
*
* @deprecated {@code --enable-all-security-services} was removed in GraalVM 21.1 https:
*/
@ConfigItem
@Deprecated
public boolean enableAllSecurityServices;
/**
* If {@code -H:+InlineBeforeAnalysis} flag will be added to the native-image run
*/
@ConfigItem(defaultValue = "true")
public boolean inlineBeforeAnalysis;
/**
* @deprecated JNI is always enabled starting from GraalVM 19.3.1.
*/
@Deprecated
@ConfigItem(defaultValue = "true")
public boolean enableJni;
/**
* The default value for java.awt.headless JVM option.
* Switching this option affects linking of awt libraries.
*/
@ConfigItem(defaultValue = "true")
public boolean headless;
/**
* Defines the user language used for building the native executable.
* <p>
* Defaults to the system one.
*/
@ConfigItem(defaultValue = "${user.language:}")
@ConvertWith(TrimmedStringConverter.class)
public Optional<String> userLanguage;
/**
* Defines the user country used for building the native executable.
* <p>
* Defaults to the system one.
*/
@ConfigItem(defaultValue = "${user.country:}")
@ConvertWith(TrimmedStringConverter.class)
public Optional<String> userCountry;
/**
* Defines the file encoding as in -Dfile.encoding=...
*
* Native image runtime uses the host's (i.e. build time) value of file.encoding
* system property. We intentionally default this to UTF-8 to avoid platform specific
* defaults to be picked up which can then result in inconsistent behavior in the
* generated native executable.
*/
@ConfigItem(defaultValue = "UTF-8")
@ConvertWith(TrimmedStringConverter.class)
public String fileEncoding;
/**
* If all character sets should be added to the native image. This increases image size
*/
@ConfigItem
public boolean addAllCharsets;
/**
* The location of the Graal distribution
*/
@ConfigItem(defaultValue = "${GRAALVM_HOME:}")
public Optional<String> graalvmHome;
/**
* The location of the JDK
*/
@ConfigItem(defaultValue = "${java.home}")
public File javaHome;
/**
* The maximum Java heap to be used during the native image generation
*/
@ConfigItem
public Optional<String> nativeImageXmx;
/**
* If the native image build should wait for a debugger to be attached before running. This is an advanced option
* and is generally only intended for those familiar with GraalVM internals
*/
@ConfigItem
public boolean debugBuildProcess;
/**
* If the debug port should be published when building with docker and debug-build-process is true
*/
@ConfigItem(defaultValue = "true")
public boolean publishDebugBuildProcessPort;
/**
* If the native image server should be restarted.
*
* @deprecated Since GraalVM 20.2.0 the native image server has become an experimental feature and is disabled by
* default.
*/
@Deprecated
@ConfigItem
public boolean cleanupServer;
/**
* If isolates should be enabled
*/
@ConfigItem(defaultValue = "true")
public boolean enableIsolates;
/**
* If a JVM based 'fallback image' should be created if native image fails. This is not recommended, as this is
* functionally the same as just running the application in a JVM
*/
@ConfigItem
public boolean enableFallbackImages;
/**
* If the native image server should be used. This can speed up compilation but can result in changes not always
* being picked up due to cache invalidation not working 100%
*
* @deprecated This used to be the default prior to GraalVM 20.2.0 and this configuration item was used to disable
* it as it was not stable. Since GraalVM 20.2.0 the native image server has become an experimental
* feature.
*/
@Deprecated
@ConfigItem
public boolean enableServer;
/**
* If all META-INF/services entries should be automatically registered
*/
@ConfigItem
public boolean autoServiceLoaderRegistration;
/**
* If the bytecode of all proxies should be dumped for inspection
*/
@ConfigItem
public boolean dumpProxies;
/**
* If this build should be done using a container runtime. Unless container-runtime is also set, docker will be
* used by default. If docker is not available or is an alias to podman, podman will be used instead as the default.
*/
@ConfigItem
public Optional<Boolean> containerBuild;
/**
* If this build is done using a remote docker daemon.
*/
@ConfigItem
public boolean remoteContainerBuild;
/**
 * Whether the native build should run in a container: an explicit {@code containerBuild}
 * setting wins; otherwise defaults to true when a container runtime is configured or a
 * remote container build was requested.
 */
public boolean isContainerBuild() {
    return containerBuild.orElse(containerRuntime.isPresent() || remoteContainerBuild);
}
/**
* The docker image to use to do the image build. It can be one of `graalvm`, `mandrel`, or the full image path, e.g.
* {@code quay.io/quarkus/ubi-quarkus-mandrel:21.3-java17}.
*/
@ConfigItem(defaultValue = "${platform.quarkus.native.builder-image}")
public String builderImage;
/**
* The container runtime (e.g. docker) that is used to do an image based build. If this is set then
* a container build is always done.
*/
@ConfigItem
public Optional<ContainerRuntime> containerRuntime;
/**
* Options to pass to the container runtime
*/
@ConfigItem
public Optional<List<String>> containerRuntimeOptions;
/**
* If the resulting image should allow VM introspection
*/
@ConfigItem
public boolean enableVmInspection;
/**
* If full stack traces are enabled in the resulting image
*/
@ConfigItem(defaultValue = "true")
public boolean fullStackTraces;
/**
* If the reports on call paths and included packages/classes/methods should be generated
*/
@ConfigItem
public boolean enableReports;
/**
* If exceptions should be reported with a full stack trace
*/
@ConfigItem(defaultValue = "true")
public boolean reportExceptionStackTraces;
/**
* If errors should be reported at runtime. This is a more relaxed setting, however it is not recommended as it
* means
* your application may fail at runtime if an unsupported feature is used by accident.
*/
@ConfigItem
public boolean reportErrorsAtRuntime;
/**
* Don't build a native image if it already exists.
*
* This is useful if you have already built an image and you want to use Quarkus to deploy it somewhere.
*
* Note that this is not able to detect if the existing image is outdated, if you have modified source
* or config and want a new image you must not use this flag.
*/
@ConfigItem(defaultValue = "false")
public boolean reuseExisting;
/**
* Build time configuration options for resources inclusion in the native executable.
*/
@ConfigItem
public ResourcesConfig resources;
@ConfigGroup
public static class ResourcesConfig {
/**
* A comma separated list of globs to match resource paths that should be added to the native image.
* <p>
* Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash.
* <p>
* By default, no resources are included.
* <p>
* Example: Given that you have {@code src/main/resources/ignored.png}
* and {@code src/main/resources/foo/selected.png} in your source tree and one of your dependency JARs contains
* {@code bar/some.txt} file, with the following configuration
*
* <pre>
* quarkus.native.resources.includes = foo/**,bar/**&
* </pre>
*
* the files {@code src/main/resources/foo/selected.png} and {@code bar/some.txt} will be included in the native
* image, while {@code src/main/resources/ignored.png} will not be included.
* <p>
* <h3>Supported glob features</h3>
* <table>
* <tr>
* <th>Feature</th>
* <th>Description</th>
* </tr>
* <tr>
* <td><code>*</code></td>
* <td>Matches a (possibly empty) sequence of characters that does not contain slash ({@code /})</td>
* </tr>
* <tr>
* <td><code>**</code></td>
* <td>Matches a (possibly empty) sequence of characters that may contain slash ({@code /})</td>
* </tr>
* <tr>
* <td><code>?</code></td>
* <td>Matches one character, but not slash</td>
* </tr>
* <tr>
* <td><code>[abc]</code></td>
* <td>Matches one character given in the bracket, but not slash</td>
* </tr>
* <tr>
* <td><code>[a-z]</code></td>
* <td>Matches one character from the range given in the bracket, but not slash</td>
* </tr>
* <tr>
* <td><code>[!abc]</code></td>
* <td>Matches one character not named in the bracket; does not match slash</td>
* </tr>
* <tr>
* <td><code>[a-z]</code></td>
* <td>Matches one character outside the range given in the bracket; does not match slash</td>
* </tr>
* <tr>
* <td><code>{one,two,three}</code></td>
* <td>Matches any of the alternating tokens separated by comma; the tokens may contain wildcards, nested
* alternations and ranges</td>
* </tr>
* <tr>
* <td><code>\</code></td>
* <td>The escape character</td>
* </tr>
* </table>
* <p>
* Note that there are three levels of escaping when passing this option via {@code application.properties}:
* <ol>
* <li>{@code application.properties} parser</li>
* <li>MicroProfile Config list converter that splits the comma separated list</li>
* <li>Glob parser</li>
* </ol>
* All three levels use backslash ({@code \}) as the escaping character. So you need to use an appropriate
* number of backslashes depending on which level you want to escape.
* <p>
* Note that Quarkus extensions typically include the resources they require by themselves. This option is
* useful in situations when the built-in functionality is not sufficient.
*/
@ConfigItem
public Optional<List<String>> includes;
/**
* A comma separated list of globs to match resource paths that should <b>not</b> be added to the native image.
* <p>
* Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash.
* <p>
* Please refer to {@link
* <p>
* By default, no resources are excluded.
* <p>
* Example: Given that you have {@code src/main/resources/red.png}
* and {@code src/main/resources/foo/green.png} in your source tree and one of your dependency JARs contains
* {@code bar/blue.png} file, with the following configuration
*
* <pre>
* quarkus.native.resources.includes = **&
* quarkus.native.resources.excludes = foo/**,**&
* </pre>
*
* the resource {@code red.png} will be available in the native image while the resources {@code foo/green.png}
* and {@code bar/blue.png} will not be available in the native image.
*/
@ConfigItem
public Optional<List<String>> excludes;
}
/**
* Debugging options.
*/
@ConfigItem
public Debug debug;
/**
 * Debug-info generation options for the native build.
 */
@ConfigGroup
public static class Debug {
    /**
     * If debug is enabled and debug symbols are generated.
     * The symbols will be generated in a separate .debug file.
     */
    @ConfigItem
    public boolean enabled;
}
/**
* Generate the report files for GraalVM Dashboard.
*/
@ConfigItem
public boolean enableDashboardDump;
/**
* Configure native executable compression using UPX.
*/
@ConfigItem
public Compression compression;
/**
 * Options controlling optional UPX compression of the produced native executable.
 */
@ConfigGroup
public static class Compression {
    /**
     * The compression level in [1, 10].
     * 10 means <em>best</em>.
     *
     * Higher compression level requires more time to compress the executable.
     */
    @ConfigItem
    public OptionalInt level;
    /**
     * Allows passing extra arguments to the UPX command line (like --brute).
     * The arguments are comma-separated.
     *
     * The exhaustive list of parameters can be found in
     * <a href="https://github.com/upx/upx">the UPX documentation</a>.
     */
    @ConfigItem
    public Optional<List<String>> additionalArgs;
}
/**
 * Supported container runtimes for container-based native builds.
 */
public enum ContainerRuntime { // nested enums are implicitly static; redundant modifier removed
    DOCKER,
    PODMAN;

    /**
     * Returns the runtime's executable name, e.g. {@code docker}.
     */
    public String getExecutableName() {
        // Locale.ROOT: constant names are ASCII; avoid locale-dependent case mapping.
        return this.name().toLowerCase(java.util.Locale.ROOT);
    }
}
/**
 * Symbolic builder-image providers/distributions that map to default builder images.
 */
public enum BuilderImageProvider { // nested enums are implicitly static; redundant modifier and stray ';' removed
    GRAALVM,
    MANDREL
}
} | class NativeConfig {
public static final String DEFAULT_GRAALVM_BUILDER_IMAGE = "quay.io/quarkus/ubi-quarkus-native-image:21.3-java11";
public static final String DEFAULT_MANDREL_BUILDER_IMAGE = "quay.io/quarkus/ubi-quarkus-mandrel:21.3-java11";
/**
* Comma-separated, additional arguments to pass to the build process.
* If an argument includes the {@code ,} symbol, it needs to be escaped, e.g. {@code \\,}
*/
@ConfigItem
public Optional<List<String>> additionalBuildArgs;
/**
* If the HTTP url handler should be enabled, allowing you to do URL.openConnection() for HTTP URLs
*/
@ConfigItem(defaultValue = "true")
public boolean enableHttpUrlHandler;
/**
* If the HTTPS url handler should be enabled, allowing you to do URL.openConnection() for HTTPS URLs
*/
@ConfigItem
public boolean enableHttpsUrlHandler;
/**
* If all security services should be added to the native image
*
* @deprecated {@code --enable-all-security-services} was removed in GraalVM 21.1 https:
*/
@ConfigItem
@Deprecated
public boolean enableAllSecurityServices;
/**
* If {@code -H:+InlineBeforeAnalysis} flag will be added to the native-image run
*/
@ConfigItem(defaultValue = "true")
public boolean inlineBeforeAnalysis;
/**
* @deprecated JNI is always enabled starting from GraalVM 19.3.1.
*/
@Deprecated
@ConfigItem(defaultValue = "true")
public boolean enableJni;
/**
* The default value for java.awt.headless JVM option.
* Switching this option affects linking of awt libraries.
*/
@ConfigItem(defaultValue = "true")
public boolean headless;
/**
* Defines the user language used for building the native executable.
* <p>
* Defaults to the system one.
*/
@ConfigItem(defaultValue = "${user.language:}")
@ConvertWith(TrimmedStringConverter.class)
public Optional<String> userLanguage;
/**
* Defines the user country used for building the native executable.
* <p>
* Defaults to the system one.
*/
@ConfigItem(defaultValue = "${user.country:}")
@ConvertWith(TrimmedStringConverter.class)
public Optional<String> userCountry;
/**
* Defines the file encoding as in -Dfile.encoding=...
*
* Native image runtime uses the host's (i.e. build time) value of file.encoding
* system property. We intentionally default this to UTF-8 to avoid platform specific
* defaults to be picked up which can then result in inconsistent behavior in the
* generated native executable.
*/
@ConfigItem(defaultValue = "UTF-8")
@ConvertWith(TrimmedStringConverter.class)
public String fileEncoding;
/**
* If all character sets should be added to the native image. This increases image size
*/
@ConfigItem
public boolean addAllCharsets;
/**
* The location of the Graal distribution
*/
@ConfigItem(defaultValue = "${GRAALVM_HOME:}")
public Optional<String> graalvmHome;
/**
* The location of the JDK
*/
@ConfigItem(defaultValue = "${java.home}")
public File javaHome;
/**
* The maximum Java heap to be used during the native image generation
*/
@ConfigItem
public Optional<String> nativeImageXmx;
/**
* If the native image build should wait for a debugger to be attached before running. This is an advanced option
* and is generally only intended for those familiar with GraalVM internals
*/
@ConfigItem
public boolean debugBuildProcess;
/**
* If the debug port should be published when building with docker and debug-build-process is true
*/
@ConfigItem(defaultValue = "true")
public boolean publishDebugBuildProcessPort;
/**
* If the native image server should be restarted.
*
* @deprecated Since GraalVM 20.2.0 the native image server has become an experimental feature and is disabled by
* default.
*/
@Deprecated
@ConfigItem
public boolean cleanupServer;
/**
* If isolates should be enabled
*/
@ConfigItem(defaultValue = "true")
public boolean enableIsolates;
/**
* If a JVM based 'fallback image' should be created if native image fails. This is not recommended, as this is
* functionally the same as just running the application in a JVM
*/
@ConfigItem
public boolean enableFallbackImages;
/**
* If the native image server should be used. This can speed up compilation but can result in changes not always
* being picked up due to cache invalidation not working 100%
*
* @deprecated This used to be the default prior to GraalVM 20.2.0 and this configuration item was used to disable
* it as it was not stable. Since GraalVM 20.2.0 the native image server has become an experimental
* feature.
*/
@Deprecated
@ConfigItem
public boolean enableServer;
/**
* If all META-INF/services entries should be automatically registered
*/
@ConfigItem
public boolean autoServiceLoaderRegistration;
/**
* If the bytecode of all proxies should be dumped for inspection
*/
@ConfigItem
public boolean dumpProxies;
/**
* If this build should be done using a container runtime. Unless container-runtime is also set, docker will be
* used by default. If docker is not available or is an alias to podman, podman will be used instead as the default.
*/
@ConfigItem
public Optional<Boolean> containerBuild;
/**
* If this build is done using a remote docker daemon.
*/
@ConfigItem
public boolean remoteContainerBuild;
public boolean isContainerBuild() {
    // A container build is requested explicitly via `container-build`, or implied
    // when either a container runtime is configured or a remote container build
    // was asked for.
    boolean impliedByOtherSettings = remoteContainerBuild || containerRuntime.isPresent();
    return containerBuild.orElse(impliedByOtherSettings);
}
/**
* The docker image to use to do the image build. It can be one of `graalvm`, `mandrel`, or the full image path, e.g.
* {@code quay.io/quarkus/ubi-quarkus-mandrel:21.3-java17}.
*/
@ConfigItem(defaultValue = "${platform.quarkus.native.builder-image}")
public String builderImage;
/**
* The container runtime (e.g. docker) that is used to do an image based build. If this is set then
* a container build is always done.
*/
@ConfigItem
public Optional<ContainerRuntime> containerRuntime;
/**
* Options to pass to the container runtime
*/
@ConfigItem
public Optional<List<String>> containerRuntimeOptions;
/**
* If the resulting image should allow VM introspection
*/
@ConfigItem
public boolean enableVmInspection;
/**
* If full stack traces are enabled in the resulting image
*/
@ConfigItem(defaultValue = "true")
public boolean fullStackTraces;
/**
* If the reports on call paths and included packages/classes/methods should be generated
*/
@ConfigItem
public boolean enableReports;
/**
* If exceptions should be reported with a full stack trace
*/
@ConfigItem(defaultValue = "true")
public boolean reportExceptionStackTraces;
/**
* If errors should be reported at runtime. This is a more relaxed setting, however it is not recommended as it
* means
* your application may fail at runtime if an unsupported feature is used by accident.
*/
@ConfigItem
public boolean reportErrorsAtRuntime;
/**
* Don't build a native image if it already exists.
*
* This is useful if you have already built an image and you want to use Quarkus to deploy it somewhere.
*
* Note that this is not able to detect if the existing image is outdated, if you have modified source
* or config and want a new image you must not use this flag.
*/
@ConfigItem(defaultValue = "false")
public boolean reuseExisting;
/**
* Build time configuration options for resources inclusion in the native executable.
*/
@ConfigItem
public ResourcesConfig resources;
@ConfigGroup
public static class ResourcesConfig {
    /**
     * A comma separated list of globs to match resource paths that should be added to the native image.
     * <p>
     * Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash.
     * <p>
     * By default, no resources are included.
     * <p>
     * Example: Given that you have {@code src/main/resources/ignored.png}
     * and {@code src/main/resources/foo/selected.png} in your source tree and one of your dependency JARs contains
     * {@code bar/some.txt} file, with the following configuration
     *
     * <pre>
     * quarkus.native.resources.includes = foo/**,bar/**
     * </pre>
     *
     * the files {@code src/main/resources/foo/selected.png} and {@code bar/some.txt} will be included in the native
     * image, while {@code src/main/resources/ignored.png} will not be included.
     * <p>
     * <h3>Supported glob features</h3>
     * <table>
     * <tr>
     * <th>Feature</th>
     * <th>Description</th>
     * </tr>
     * <tr>
     * <td><code>*</code></td>
     * <td>Matches a (possibly empty) sequence of characters that does not contain slash ({@code /})</td>
     * </tr>
     * <tr>
     * <td><code>**</code></td>
     * <td>Matches a (possibly empty) sequence of characters that may contain slash ({@code /})</td>
     * </tr>
     * <tr>
     * <td><code>?</code></td>
     * <td>Matches one character, but not slash</td>
     * </tr>
     * <tr>
     * <td><code>[abc]</code></td>
     * <td>Matches one character given in the bracket, but not slash</td>
     * </tr>
     * <tr>
     * <td><code>[a-z]</code></td>
     * <td>Matches one character from the range given in the bracket, but not slash</td>
     * </tr>
     * <tr>
     * <td><code>[!abc]</code></td>
     * <td>Matches one character not named in the bracket; does not match slash</td>
     * </tr>
     * <tr>
     * <td><code>[!a-z]</code></td>
     * <td>Matches one character outside the range given in the bracket; does not match slash</td>
     * </tr>
     * <tr>
     * <td><code>{one,two,three}</code></td>
     * <td>Matches any of the alternating tokens separated by comma; the tokens may contain wildcards, nested
     * alternations and ranges</td>
     * </tr>
     * <tr>
     * <td><code>\</code></td>
     * <td>The escape character</td>
     * </tr>
     * </table>
     * <p>
     * Note that there are three levels of escaping when passing this option via {@code application.properties}:
     * <ol>
     * <li>{@code application.properties} parser</li>
     * <li>MicroProfile Config list converter that splits the comma separated list</li>
     * <li>Glob parser</li>
     * </ol>
     * All three levels use backslash ({@code \}) as the escaping character. So you need to use an appropriate
     * number of backslashes depending on which level you want to escape.
     * <p>
     * Note that Quarkus extensions typically include the resources they require by themselves. This option is
     * useful in situations when the built-in functionality is not sufficient.
     */
    @ConfigItem
    public Optional<List<String>> includes;

    /**
     * A comma separated list of globs to match resource paths that should <b>not</b> be added to the native image.
     * <p>
     * Use slash ({@code /}) as a path separator on all platforms. Globs must not start with slash.
     * <p>
     * Please refer to {@link #includes} for details about the supported glob syntax and escaping rules.
     * <p>
     * By default, no resources are excluded.
     * <p>
     * Example: Given that you have {@code src/main/resources/red.png}
     * and {@code src/main/resources/foo/green.png} in your source tree and one of your dependency JARs contains
     * {@code bar/blue.png} file, with the following configuration
     *
     * <pre>
     * quarkus.native.resources.includes = **
     * quarkus.native.resources.excludes = foo/**,bar/**
     * </pre>
     *
     * the resource {@code red.png} will be available in the native image while the resources {@code foo/green.png}
     * and {@code bar/blue.png} will not be available in the native image.
     */
    @ConfigItem
    public Optional<List<String>> excludes;
}
/**
* Debugging options.
*/
@ConfigItem
public Debug debug;
/**
 * Debug options for the native executable (see the {@code debug} config item above).
 */
@ConfigGroup
public static class Debug {
    /**
     * If debug is enabled and debug symbols are generated.
     * The symbols will be generated in a separate .debug file.
     */
    @ConfigItem
    public boolean enabled;
}
/**
* Generate the report files for GraalVM Dashboard.
*/
@ConfigItem
public boolean enableDashboardDump;
/**
* Configure native executable compression using UPX.
*/
@ConfigItem
public Compression compression;
/**
 * UPX compression options for the native executable.
 */
@ConfigGroup
public static class Compression {
    /**
     * The compression level in [1, 10].
     * 10 means <em>best</em>.
     *
     * Higher compression level requires more time to compress the executable.
     */
    @ConfigItem
    public OptionalInt level;

    /**
     * Allows passing extra arguments to the UPX command line (like --brute).
     * The arguments are comma-separated.
     *
     * The exhaustive list of parameters can be found in the
     * <a href="https://github.com/upx/upx/blob/devel/doc/upx.pod">UPX documentation</a>.
     */
    @ConfigItem
    public Optional<List<String>> additionalArgs;
}
/**
* Supported Container runtimes
*/
public enum ContainerRuntime {
    DOCKER,
    PODMAN;

    /**
     * Returns the executable name of this runtime ({@code "docker"} or {@code "podman"}).
     */
    public String getExecutableName() {
        // Use a locale-independent lowercase mapping: with the default locale
        // (e.g. Turkish), 'I' would lowercase to a dotless i, producing a broken
        // executable name. (The redundant `static` modifier was also dropped —
        // nested enums are implicitly static.)
        return this.name().toLowerCase(java.util.Locale.ROOT);
    }
}
/**
* Supported Builder Image providers/distributions
*/
public static enum BuilderImageProvider {
    // GraalVM CE based builder image (matches the `graalvm` shorthand of builder-image)
    GRAALVM,
    // Mandrel based builder image (matches the `mandrel` shorthand of builder-image)
    MANDREL;
}
} |
Why do we need this change? | public void invoke(IN value, SinkFunction.Context context) throws Exception {
buckets.onElement(
value,
context.currentProcessingTime(),
context.timestamp(),
context.currentWatermark());
} | buckets.onElement( | public void invoke(IN value, SinkFunction.Context context) throws Exception {
this.helper.onElement(
value,
context.currentProcessingTime(),
context.timestamp(),
context.currentWatermark());
} | class DefaultBulkFormatBuilder<IN> extends BulkFormatBuilder<IN, String, DefaultBulkFormatBuilder<IN>> {
private static final long serialVersionUID = 7493169281036370228L;
private DefaultBulkFormatBuilder(Path basePath, BulkWriter.Factory<IN> writerFactory, BucketAssigner<IN, String> assigner) {
super(basePath, writerFactory, assigner);
}
} | class DefaultBulkFormatBuilder<IN> extends BulkFormatBuilder<IN, String, DefaultBulkFormatBuilder<IN>> {
private static final long serialVersionUID = 7493169281036370228L;
private DefaultBulkFormatBuilder(Path basePath, BulkWriter.Factory<IN> writerFactory, BucketAssigner<IN, String> assigner) {
super(basePath, writerFactory, assigner);
}
} |
check children.size() equal first to avoid index out of bound | public boolean equals(Object o) {
// Identity check first, then type check, then the sizes must match before the
// element-wise comparison: without the size check a shorter `that.children`
// throws IndexOutOfBoundsException and a longer one would compare equal by
// mistake (only the first `children.size()` entries were inspected).
if (this == o) {
    return true;
}
if (!(o instanceof MapLiteral)) {
    return false;
}
MapLiteral that = (MapLiteral) o;
if (children.size() != that.children.size()) {
    return false;
}
// children holds alternating key/value literals; element-wise equality over the
// flat list therefore compares both keys and values in order.
for (int i = 0; i < children.size(); i++) {
    if (!children.get(i).equals(that.children.get(i))) {
        return false;
    }
}
return true;
} | for (int i = 0; i < children.size(); i++) { | public boolean equals(Object o) {
if (!(o instanceof MapLiteral)) {
return false;
}
if (this == o) {
return true;
}
MapLiteral that = (MapLiteral) o;
return Objects.equals(children, that.children);
} | class MapLiteral extends LiteralExpr {
public MapLiteral() {
type = new MapType(Type.NULL, Type.NULL);
children = new ArrayList<>();
}
public MapLiteral(Type type, List<LiteralExpr> keys, List<LiteralExpr> values) {
this.type = type;
children = Lists.newArrayList();
for (int i = 0; i < keys.size(); i++) {
children.add(keys.get(i));
children.add(values.get(i));
}
}
public MapLiteral(LiteralExpr... exprs) throws AnalysisException {
Type keyType = Type.NULL;
Type valueType = Type.NULL;
children = new ArrayList<>();
for (int idx = 0; idx < exprs.length && idx + 1 < exprs.length; idx += 2) {
if (!MapType.MAP.supportSubType(exprs[idx].getType())) {
throw new AnalysisException("Invalid key type in Map, not support " + exprs[idx].getType());
}
boolean enableDecimal256 = SessionVariable.getEnableDecimal256();
keyType = Type.getAssignmentCompatibleType(keyType, exprs[idx].getType(), true, enableDecimal256);
valueType = Type.getAssignmentCompatibleType(valueType, exprs[idx + 1].getType(), true, enableDecimal256);
}
if (keyType == Type.INVALID) {
throw new AnalysisException("Invalid key type in Map.");
}
if (valueType == Type.INVALID) {
throw new AnalysisException("Invalid value type in Map.");
}
try {
for (int idx = 0; idx < exprs.length && idx + 1 < exprs.length; idx += 2) {
if (exprs[idx].getType().equals(keyType)) {
children.add(exprs[idx]);
} else {
children.add(exprs[idx].convertTo(keyType));
}
if (exprs[idx + 1].getType().equals(valueType)) {
children.add(exprs[idx + 1]);
} else {
children.add(exprs[idx + 1].convertTo(valueType));
}
}
} catch (AnalysisException e) {
String s = "{" + StringUtils.join(exprs, ',') + "}";
throw new AnalysisException("Invalid Map " + s + " literal: " + e.getMessage());
}
type = new MapType(keyType, valueType);
}
protected MapLiteral(MapLiteral other) {
super(other);
}
@Override
public LiteralExpr convertTo(Type targetType) throws AnalysisException {
    // Converts every key/value child literal to the target map's key/value types
    // and builds a new MapLiteral from the converted pairs.
    Preconditions.checkState(targetType instanceof MapType);
    Type keyType = ((MapType) targetType).getKeyType();
    Type valType = ((MapType) targetType).getValueType();
    LiteralExpr[] literals = new LiteralExpr[children.size()];
    // children alternates key, value (see the constructors, which always add pairs).
    // NOTE(review): unlike uncheckedCastTo, this loop does not guard
    // `i + 1 < children.size()` — an odd-sized children list would throw
    // IndexOutOfBoundsException. Confirm the even-size invariant always holds.
    for (int i = 0; i < children.size(); i += 2) {
        literals[i] = (LiteralExpr) Expr.convertLiteral(children.get(i), keyType);
        literals[i + 1] = (LiteralExpr) Expr.convertLiteral(children.get(i + 1), valType);
    }
    return new MapLiteral(literals);
}
@Override
public Expr uncheckedCastTo(Type targetType) throws AnalysisException {
if (!targetType.isMapType()) {
return super.uncheckedCastTo(targetType);
}
MapLiteral literal = new MapLiteral(this);
Type keyType = ((MapType) targetType).getKeyType();
Type valueType = ((MapType) targetType).getValueType();
for (int i = 0; i < children.size() && i + 1 < children.size(); i += 2) {
Expr key = Expr.convertLiteral(children.get(i), keyType);
Expr value = Expr.convertLiteral(children.get(i + 1), valueType);
if ((!key.isLiteral()) || (!value.isLiteral())) {
throw new AnalysisException("Unexpected map literal cast failed. from type: "
+ this.type + ", to type: " + targetType);
}
literal.children.set(i, key);
literal.children.set(i + 1, value);
}
literal.setType(targetType);
return literal;
}
@Override
public void checkValueValid() {
    // A map literal is valid iff every key/value child literal is valid;
    // the first invalid child aborts by throwing AnalysisException.
    for (int idx = 0; idx < children.size(); idx++) {
        children.get(idx).checkValueValid();
    }
}
@Override
public String getStringValue() {
    // Renders the literal as {k1:v1, k2:v2, ...}; children holds alternating
    // key/value literals, so step over the flat list in pairs.
    StringBuilder sb = new StringBuilder("{");
    for (int i = 0; i + 1 < children.size(); i += 2) {
        if (i > 0) {
            sb.append(", ");
        }
        sb.append(children.get(i).getStringValue())
                .append(':')
                .append(children.get(i + 1).getStringValue());
    }
    return sb.append('}').toString();
}
@Override
public String getStringValueForArray() {
    // NOTE(review): always returns null — this looks unimplemented for map
    // literals; confirm that every caller tolerates a null here.
    return null;
}
@Override
protected String toSqlImpl() {
List<String> list = new ArrayList<>(children.size());
for (int i = 0; i < children.size() && i + 1 < children.size(); i += 2) {
list.add(children.get(i).toSqlImpl() + ":" + children.get(i + 1).toSqlImpl());
}
return "MAP{" + StringUtils.join(list, ", ") + "}";
}
@Override
protected void toThrift(TExprNode msg) {
msg.node_type = TExprNodeType.MAP_LITERAL;
TTypeDesc container = new TTypeDesc();
container.setTypes(new ArrayList<TTypeNode>());
type.toThrift(container);
msg.setType(container);
}
@Override
public Expr clone() {
return new MapLiteral(this);
}
@Override
public boolean isMinValue() {
return false;
}
@Override
public int compareLiteral(LiteralExpr expr) {
    // NOTE(review): unconditionally returns 0, i.e. every MapLiteral compares
    // equal to any other literal under this ordering. Confirm this placeholder
    // behavior is intended (ordering of map literals undefined).
    return 0;
}
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
int size = in.readInt();
children = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
children.add(Expr.readIn(in));
}
}
public static MapLiteral read(DataInput in) throws IOException {
MapLiteral literal = new MapLiteral();
literal.readFields(in);
return literal;
}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
out.writeInt(children.size());
for (Expr e : children) {
Expr.writeTo(e, out);
}
}
@Override
public int hashCode() {
    // Seed with the parent hash scaled by 31, then fold in each child's hash
    // additively. The sum is order-insensitive, which still satisfies the
    // equals/hashCode contract (element-wise equal children yield equal sums).
    return children.stream()
            .mapToInt(Expr::hashCode)
            .reduce(31 * super.hashCode(), Integer::sum);
}
@Override
} | class MapLiteral extends LiteralExpr {
public MapLiteral() {
type = new MapType(Type.NULL, Type.NULL);
children = new ArrayList<>();
}
public MapLiteral(Type type, List<LiteralExpr> keys, List<LiteralExpr> values) {
this.type = type;
children = Lists.newArrayList();
for (int i = 0; i < keys.size(); i++) {
children.add(keys.get(i));
children.add(values.get(i));
}
}
public MapLiteral(LiteralExpr... exprs) throws AnalysisException {
Type keyType = Type.NULL;
Type valueType = Type.NULL;
children = new ArrayList<>();
for (int idx = 0; idx < exprs.length && idx + 1 < exprs.length; idx += 2) {
if (!MapType.MAP.supportSubType(exprs[idx].getType())) {
throw new AnalysisException("Invalid key type in Map, not support " + exprs[idx].getType());
}
boolean enableDecimal256 = SessionVariable.getEnableDecimal256();
keyType = Type.getAssignmentCompatibleType(keyType, exprs[idx].getType(), true, enableDecimal256);
valueType = Type.getAssignmentCompatibleType(valueType, exprs[idx + 1].getType(), true, enableDecimal256);
}
if (keyType == Type.INVALID) {
throw new AnalysisException("Invalid key type in Map.");
}
if (valueType == Type.INVALID) {
throw new AnalysisException("Invalid value type in Map.");
}
try {
for (int idx = 0; idx < exprs.length && idx + 1 < exprs.length; idx += 2) {
if (exprs[idx].getType().equals(keyType)) {
children.add(exprs[idx]);
} else {
children.add(exprs[idx].convertTo(keyType));
}
if (exprs[idx + 1].getType().equals(valueType)) {
children.add(exprs[idx + 1]);
} else {
children.add(exprs[idx + 1].convertTo(valueType));
}
}
} catch (AnalysisException e) {
String s = "{" + StringUtils.join(exprs, ',') + "}";
throw new AnalysisException("Invalid Map " + s + " literal: " + e.getMessage());
}
type = new MapType(keyType, valueType);
}
protected MapLiteral(MapLiteral other) {
super(other);
}
@Override
public LiteralExpr convertTo(Type targetType) throws AnalysisException {
Preconditions.checkState(targetType instanceof MapType);
Type keyType = ((MapType) targetType).getKeyType();
Type valType = ((MapType) targetType).getValueType();
LiteralExpr[] literals = new LiteralExpr[children.size()];
for (int i = 0; i < children.size(); i += 2) {
literals[i] = (LiteralExpr) Expr.convertLiteral(children.get(i), keyType);
literals[i + 1] = (LiteralExpr) Expr.convertLiteral(children.get(i + 1), valType);
}
return new MapLiteral(literals);
}
@Override
public Expr uncheckedCastTo(Type targetType) throws AnalysisException {
if (!targetType.isMapType()) {
return super.uncheckedCastTo(targetType);
}
MapLiteral literal = new MapLiteral(this);
Type keyType = ((MapType) targetType).getKeyType();
Type valueType = ((MapType) targetType).getValueType();
for (int i = 0; i < children.size() && i + 1 < children.size(); i += 2) {
Expr key = Expr.convertLiteral(children.get(i), keyType);
Expr value = Expr.convertLiteral(children.get(i + 1), valueType);
if ((!key.isLiteral()) || (!value.isLiteral())) {
throw new AnalysisException("Unexpected map literal cast failed. from type: "
+ this.type + ", to type: " + targetType);
}
literal.children.set(i, key);
literal.children.set(i + 1, value);
}
literal.setType(targetType);
return literal;
}
@Override
public void checkValueValid() throws AnalysisException {
for (Expr e : children) {
e.checkValueValid();
}
}
@Override
public String getStringValue() {
List<String> list = new ArrayList<>(children.size());
for (int i = 0; i < children.size() && i + 1 < children.size(); i += 2) {
list.add(children.get(i).getStringValue() + ":" + children.get(i + 1).getStringValue());
}
return "{" + StringUtils.join(list, ", ") + "}";
}
@Override
public String getStringValueForArray() {
return null;
}
@Override
protected String toSqlImpl() {
List<String> list = new ArrayList<>(children.size());
for (int i = 0; i < children.size() && i + 1 < children.size(); i += 2) {
list.add(children.get(i).toSqlImpl() + ":" + children.get(i + 1).toSqlImpl());
}
return "MAP{" + StringUtils.join(list, ", ") + "}";
}
@Override
protected void toThrift(TExprNode msg) {
msg.node_type = TExprNodeType.MAP_LITERAL;
TTypeDesc container = new TTypeDesc();
container.setTypes(new ArrayList<TTypeNode>());
type.toThrift(container);
msg.setType(container);
}
@Override
public Expr clone() {
return new MapLiteral(this);
}
@Override
public boolean isMinValue() {
return false;
}
@Override
public int compareLiteral(LiteralExpr expr) {
return 0;
}
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
int size = in.readInt();
children = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
children.add(Expr.readIn(in));
}
}
public static MapLiteral read(DataInput in) throws IOException {
MapLiteral literal = new MapLiteral();
literal.readFields(in);
return literal;
}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
out.writeInt(children.size());
for (Expr e : children) {
Expr.writeTo(e, out);
}
}
@Override
public int hashCode() {
return Objects.hashCode(children);
}
@Override
} |
```Files.readAllBytes``` was giving some errors | public void testAddAndRemoveExtension() throws IOException, URISyntaxException, InterruptedException {
final File projectDir = getProjectDir("add-remove-extension-single-module");
runGradleWrapper(projectDir, ":addExtension", "--extensions=hibernate-orm");
final Path Build = projectDir.toPath().resolve("build.gradle");
assertThat(Build).exists();
assertThat(Files.readString(Build)).contains("implementation 'io.quarkus:quarkus-hibernate-orm'");
runGradleWrapper(projectDir, ":removeExtension", "--extensions=hibernate-orm");
assertThat(Files.readString(Build)).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'");
} | assertThat(Files.readString(Build)).contains("implementation 'io.quarkus:quarkus-hibernate-orm'"); | public void testAddAndRemoveExtension() throws IOException, URISyntaxException, InterruptedException {
final File projectDir = getProjectDir("add-remove-extension-single-module");
runGradleWrapper(projectDir, ":addExtension", "--extensions=hibernate-orm");
final Path build = projectDir.toPath().resolve("build.gradle");
assertThat(build).exists();
assertThat(new String(Files.readAllBytes(build))).contains("implementation 'io.quarkus:quarkus-hibernate-orm'");
runGradleWrapper(projectDir, ":removeExtension", "--extensions=hibernate-orm");
assertThat(new String(Files.readAllBytes(build))).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'");
} | class AddExtensionToSingleModuleProjectTest extends QuarkusGradleWrapperTestBase {
@Test
@Test
public void testRemoveNonExistentExtension() throws IOException, URISyntaxException, InterruptedException {
    // Removing an extension that was never added must leave build.gradle unchanged.
    final File projectDir = getProjectDir("add-remove-extension-single-module");
    runGradleWrapper(projectDir, "clean", "build");
    // Local renamed from `Build` to `build` (lowerCamelCase).
    final Path build = projectDir.toPath().resolve("build.gradle");
    assertThat(build).exists();
    // Files.readAllBytes + String copy instead of Files.readString (Java 11+) for
    // pre-11 compatibility; decodes with the platform default charset, which is
    // acceptable for this ASCII substring check.
    assertThat(new String(Files.readAllBytes(build))).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'");
    runGradleWrapper(projectDir, ":removeExtension", "--extensions=hibernate-orm");
    assertThat(new String(Files.readAllBytes(build))).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'");
}
} | class AddExtensionToSingleModuleProjectTest extends QuarkusGradleWrapperTestBase {
@Test
@Test
public void testRemoveNonExistentExtension() throws IOException, URISyntaxException, InterruptedException {
final File projectDir = getProjectDir("add-remove-extension-single-module");
runGradleWrapper(projectDir, "clean", "build");
final Path build = projectDir.toPath().resolve("build.gradle");
assertThat(build).exists();
assertThat(new String(Files.readAllBytes(build))).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'");
runGradleWrapper(projectDir, ":removeExtension", "--extensions=hibernate-orm");
assertThat(new String(Files.readAllBytes(build))).doesNotContain("implementation 'io.quarkus:quarkus-hibernate-orm'");
}
} |
how should I go about adding it to the internal suite? | public void testKafkaIOExternalRoundtripWithMetadataAndNullKeysAndValues() {
// Round-trip a single record whose key and value are both null: write it to the
// configured Kafka topic, then read it back with externalWithMetadata().
List<byte[]> nullList = new ArrayList<>();
nullList.add(null);
writePipeline
    .apply(Create.of(nullList))
    .apply(
        ParDo.of(
            new DoFn<byte[], KV<byte[], byte[]>>() {
                @ProcessElement
                public void processElement(
                    @Element byte[] element, OutputReceiver<KV<byte[], byte[]>> receiver) {
                    // element is null here; emit a KV with null key and null value.
                    receiver.output(KV.of(element, element));
                }
            }))
    .apply(
        KafkaIO.<byte[], byte[]>write()
            .withBootstrapServers(options.getKafkaBootstrapServerAddresses())
            .withTopic(options.getKafkaTopic())
            .withKeySerializer(ByteArraySerializer.class)
            .withValueSerializer(ByteArraySerializer.class));
PipelineResult writeResult = writePipeline.run();
writeResult.waitUntilFinish();
// NullableCoder wrappers are required so the null key/value survive decoding.
readPipeline.apply(
    KafkaIO.<byte[], byte[]>read()
        .withBootstrapServers(options.getKafkaBootstrapServerAddresses())
        .withTopic(options.getKafkaTopic())
        .withKeyDeserializerAndCoder(
            ByteArrayDeserializer.class, NullableCoder.of(ByteArrayCoder.of()))
        .withValueDeserializerAndCoder(
            ByteArrayDeserializer.class, NullableCoder.of(ByteArrayCoder.of()))
        .externalWithMetadata());
// NOTE(review): the read pipeline is executed but nothing asserts the content of
// the round-tripped record — capture the resulting PCollection and PAssert that
// key and value are null before waiting for the pipeline to finish.
PipelineResult readResult = readPipeline.run();
readResult.waitUntilFinish();
} | readResult.waitUntilFinish(); | public void testKafkaIOExternalRoundtripWithMetadataAndNullKeysAndValues() {
List<byte[]> nullList = new ArrayList<>();
nullList.add(null);
writePipeline
.apply(Create.of(nullList))
.apply(
ParDo.of(
new DoFn<byte[], KV<byte[], byte[]>>() {
@ProcessElement
public void processElement(
@Element byte[] element, OutputReceiver<KV<byte[], byte[]>> receiver) {
receiver.output(KV.of(element, element));
}
}))
.apply(
KafkaIO.<byte[], byte[]>write()
.withBootstrapServers(options.getKafkaBootstrapServerAddresses())
.withTopic(options.getKafkaTopic())
.withKeySerializer(ByteArraySerializer.class)
.withValueSerializer(ByteArraySerializer.class));
PipelineResult writeResult = writePipeline.run();
writeResult.waitUntilFinish();
PCollection<Row> rows =
readPipeline.apply(
KafkaIO.<byte[], byte[]>read()
.withBootstrapServers(options.getKafkaBootstrapServerAddresses())
.withTopic(options.getKafkaTopic())
.withKeyDeserializerAndCoder(
ByteArrayDeserializer.class, NullableCoder.of(ByteArrayCoder.of()))
.withValueDeserializerAndCoder(
ByteArrayDeserializer.class, NullableCoder.of(ByteArrayCoder.of()))
.externalWithMetadata());
PAssert.thatSingleton(rows)
.satisfies(
actualRow -> {
assertNull(actualRow.getString("key"));
assertNull(actualRow.getString("value"));
return null;
});
PipelineResult readResult = readPipeline.run();
readResult.waitUntilFinish();
} | class KafkaIOIT {
private static final String READ_TIME_METRIC_NAME = "read_time";
private static final String WRITE_TIME_METRIC_NAME = "write_time";
private static final String RUN_TIME_METRIC_NAME = "run_time";
private static final String READ_ELEMENT_METRIC_NAME = "kafka_read_element_count";
private static final String NAMESPACE = KafkaIOIT.class.getName();
private static final String TEST_ID = UUID.randomUUID().toString();
private static final String TIMESTAMP = Timestamp.now().toString();
private static String expectedHashcode;
private static SyntheticSourceOptions sourceOptions;
private static Options options;
private static InfluxDBSettings settings;
@Rule public TestPipeline writePipeline = TestPipeline.create();
@Rule public TestPipeline readPipeline = TestPipeline.create();
private static KafkaContainer kafkaContainer;
@BeforeClass
public static void setup() throws IOException {
options = IOITHelper.readIOTestPipelineOptions(Options.class);
sourceOptions = fromJsonString(options.getSourceOptions(), SyntheticSourceOptions.class);
if (options.isWithTestcontainers()) {
setupKafkaContainer();
} else {
settings =
InfluxDBSettings.builder()
.withHost(options.getInfluxHost())
.withDatabase(options.getInfluxDatabase())
.withMeasurement(options.getInfluxMeasurement())
.get();
}
}
@AfterClass
public static void afterClass() {
if (kafkaContainer != null) {
kafkaContainer.stop();
}
}
@Test
public void testKafkaIOReadsAndWritesCorrectlyInStreaming() throws IOException {
writePipeline
.apply("Generate records", Read.from(new SyntheticBoundedSource(sourceOptions)))
.apply("Measure write time", ParDo.of(new TimeMonitor<>(NAMESPACE, WRITE_TIME_METRIC_NAME)))
.apply("Write to Kafka", writeToKafka());
readPipeline.getOptions().as(Options.class).setStreaming(true);
readPipeline
.apply("Read from unbounded Kafka", readFromKafka())
.apply("Measure read time", ParDo.of(new TimeMonitor<>(NAMESPACE, READ_TIME_METRIC_NAME)))
.apply("Map records to strings", MapElements.via(new MapKafkaRecordsToStrings()))
.apply("Counting element", ParDo.of(new CountingFn(NAMESPACE, READ_ELEMENT_METRIC_NAME)));
PipelineResult writeResult = writePipeline.run();
writeResult.waitUntilFinish();
PipelineResult readResult = readPipeline.run();
PipelineResult.State readState =
readResult.waitUntilFinish(Duration.standardSeconds(options.getReadTimeout()));
cancelIfTimeouted(readResult, readState);
assertEquals(
sourceOptions.numRecords,
readElementMetric(readResult, NAMESPACE, READ_ELEMENT_METRIC_NAME));
if (!options.isWithTestcontainers()) {
Set<NamedTestResult> metrics = readMetrics(writeResult, readResult);
IOITMetrics.publishToInflux(TEST_ID, TIMESTAMP, metrics, settings);
}
}
@Test
public void testKafkaIOReadsAndWritesCorrectlyInBatch() throws IOException {
Map<Long, String> expectedHashes =
ImmutableMap.of(
1000L, "4507649971ee7c51abbb446e65a5c660",
100_000_000L, "0f12c27c9a7672e14775594be66cad9a");
expectedHashcode = getHashForRecordCount(sourceOptions.numRecords, expectedHashes);
writePipeline
.apply("Generate records", Read.from(new SyntheticBoundedSource(sourceOptions)))
.apply("Measure write time", ParDo.of(new TimeMonitor<>(NAMESPACE, WRITE_TIME_METRIC_NAME)))
.apply("Write to Kafka", writeToKafka());
PCollection<String> hashcode =
readPipeline
.apply("Read from bounded Kafka", readFromBoundedKafka())
.apply(
"Measure read time", ParDo.of(new TimeMonitor<>(NAMESPACE, READ_TIME_METRIC_NAME)))
.apply("Map records to strings", MapElements.via(new MapKafkaRecordsToStrings()))
.apply("Calculate hashcode", Combine.globally(new HashingFn()).withoutDefaults());
PAssert.thatSingleton(hashcode).isEqualTo(expectedHashcode);
PipelineResult writeResult = writePipeline.run();
writeResult.waitUntilFinish();
PipelineResult readResult = readPipeline.run();
PipelineResult.State readState =
readResult.waitUntilFinish(Duration.standardSeconds(options.getReadTimeout()));
cancelIfTimeouted(readResult, readState);
if (!options.isWithTestcontainers()) {
Set<NamedTestResult> metrics = readMetrics(writeResult, readResult);
IOITMetrics.publishToInflux(TEST_ID, TIMESTAMP, metrics, settings);
}
}
@Test
/** Reads the counter metric {@code name} under {@code namespace} from the pipeline result. */
private long readElementMetric(PipelineResult result, String namespace, String name) {
    return new MetricsReader(result, namespace).getCounterMetric(name);
}
private Set<NamedTestResult> readMetrics(PipelineResult writeResult, PipelineResult readResult) {
BiFunction<MetricsReader, String, NamedTestResult> supplier =
(reader, metricName) -> {
long start = reader.getStartTimeMetric(metricName);
long end = reader.getEndTimeMetric(metricName);
return NamedTestResult.create(TEST_ID, TIMESTAMP, metricName, (end - start) / 1e3);
};
NamedTestResult writeTime =
supplier.apply(new MetricsReader(writeResult, NAMESPACE), WRITE_TIME_METRIC_NAME);
NamedTestResult readTime =
supplier.apply(new MetricsReader(readResult, NAMESPACE), READ_TIME_METRIC_NAME);
NamedTestResult runTime =
NamedTestResult.create(
TEST_ID, TIMESTAMP, RUN_TIME_METRIC_NAME, writeTime.getValue() + readTime.getValue());
return ImmutableSet.of(readTime, writeTime, runTime);
}
// Cancels the read pipeline when it did not reach a terminal state in time.
// A null state here means waitUntilFinish(Duration) returned without the
// pipeline finishing (see the callers that pass the timeout), so we cancel it.
private void cancelIfTimeouted(PipelineResult readResult, PipelineResult.State readState)
    throws IOException {
    if (readState == null) {
        readResult.cancel();
    }
}
private KafkaIO.Write<byte[], byte[]> writeToKafka() {
return KafkaIO.<byte[], byte[]>write()
.withBootstrapServers(options.getKafkaBootstrapServerAddresses())
.withTopic(options.getKafkaTopic())
.withKeySerializer(ByteArraySerializer.class)
.withValueSerializer(ByteArraySerializer.class);
}
private KafkaIO.Read<byte[], byte[]> readFromBoundedKafka() {
return readFromKafka().withMaxNumRecords(sourceOptions.numRecords);
}
private KafkaIO.Read<byte[], byte[]> readFromKafka() {
return KafkaIO.readBytes()
.withBootstrapServers(options.getKafkaBootstrapServerAddresses())
.withConsumerConfigUpdates(ImmutableMap.of("auto.offset.reset", "earliest"))
.withTopic(options.getKafkaTopic());
}
private static class CountingFn extends DoFn<String, Void> {
private final Counter elementCounter;
CountingFn(String namespace, String name) {
elementCounter = Metrics.counter(namespace, name);
}
@ProcessElement
public void processElement() {
elementCounter.inc(1L);
}
}
/** Pipeline options specific for this test. */
public interface Options extends IOTestPipelineOptions, StreamingOptions {
@Description("Options for synthetic source.")
@Validation.Required
String getSourceOptions();
void setSourceOptions(String sourceOptions);
@Description("Kafka bootstrap server addresses")
@Default.String("localhost:9092")
String getKafkaBootstrapServerAddresses();
void setKafkaBootstrapServerAddresses(String address);
@Description("Kafka topic")
@Validation.Required
String getKafkaTopic();
void setKafkaTopic(String topic);
@Description("Time to wait for the events to be processed by the read pipeline (in seconds)")
@Validation.Required
Integer getReadTimeout();
void setReadTimeout(Integer readTimeout);
@Description("Whether to use testcontainers")
@Default.Boolean(false)
Boolean isWithTestcontainers();
void setWithTestcontainers(Boolean withTestcontainers);
@Description("Kafka container version in format 'X.Y.Z'. Use when useTestcontainers is true")
@Nullable
String getKafkaContainerVersion();
void setKafkaContainerVersion(String kafkaContainerVersion);
}
private static class MapKafkaRecordsToStrings
extends SimpleFunction<KafkaRecord<byte[], byte[]>, String> {
@Override
public String apply(KafkaRecord<byte[], byte[]> input) {
String key = Arrays.toString(input.getKV().getKey());
String value = Arrays.toString(input.getKV().getValue());
return String.format("%s %s", key, value);
}
}
public static String getHashForRecordCount(long recordCount, Map<Long, String> hashes) {
String hash = hashes.get(recordCount);
if (hash == null) {
throw new UnsupportedOperationException(
String.format("No hash for that record count: %s", recordCount));
}
return hash;
}
private static void setupKafkaContainer() {
kafkaContainer =
new KafkaContainer(
DockerImageName.parse("confluentinc/cp-kafka")
.withTag(options.getKafkaContainerVersion()));
kafkaContainer.start();
options.setKafkaBootstrapServerAddresses(kafkaContainer.getBootstrapServers());
}
} | class KafkaIOIT {
private static final String READ_TIME_METRIC_NAME = "read_time";
private static final String WRITE_TIME_METRIC_NAME = "write_time";
private static final String RUN_TIME_METRIC_NAME = "run_time";
private static final String READ_ELEMENT_METRIC_NAME = "kafka_read_element_count";
private static final String NAMESPACE = KafkaIOIT.class.getName();
private static final String TEST_ID = UUID.randomUUID().toString();
private static final String TIMESTAMP = Timestamp.now().toString();
private static String expectedHashcode;
private static SyntheticSourceOptions sourceOptions;
private static Options options;
private static InfluxDBSettings settings;
@Rule public TestPipeline writePipeline = TestPipeline.create();
@Rule public TestPipeline readPipeline = TestPipeline.create();
private static KafkaContainer kafkaContainer;
@BeforeClass
public static void setup() throws IOException {
options = IOITHelper.readIOTestPipelineOptions(Options.class);
sourceOptions = fromJsonString(options.getSourceOptions(), SyntheticSourceOptions.class);
if (options.isWithTestcontainers()) {
setupKafkaContainer();
} else {
settings =
InfluxDBSettings.builder()
.withHost(options.getInfluxHost())
.withDatabase(options.getInfluxDatabase())
.withMeasurement(options.getInfluxMeasurement())
.get();
}
}
@AfterClass
public static void afterClass() {
if (kafkaContainer != null) {
kafkaContainer.stop();
}
}
@Test
public void testKafkaIOReadsAndWritesCorrectlyInStreaming() throws IOException {
writePipeline
.apply("Generate records", Read.from(new SyntheticBoundedSource(sourceOptions)))
.apply("Measure write time", ParDo.of(new TimeMonitor<>(NAMESPACE, WRITE_TIME_METRIC_NAME)))
.apply("Write to Kafka", writeToKafka());
readPipeline.getOptions().as(Options.class).setStreaming(true);
readPipeline
.apply("Read from unbounded Kafka", readFromKafka())
.apply("Measure read time", ParDo.of(new TimeMonitor<>(NAMESPACE, READ_TIME_METRIC_NAME)))
.apply("Map records to strings", MapElements.via(new MapKafkaRecordsToStrings()))
.apply("Counting element", ParDo.of(new CountingFn(NAMESPACE, READ_ELEMENT_METRIC_NAME)));
PipelineResult writeResult = writePipeline.run();
writeResult.waitUntilFinish();
PipelineResult readResult = readPipeline.run();
PipelineResult.State readState =
readResult.waitUntilFinish(Duration.standardSeconds(options.getReadTimeout()));
cancelIfTimeouted(readResult, readState);
assertEquals(
sourceOptions.numRecords,
readElementMetric(readResult, NAMESPACE, READ_ELEMENT_METRIC_NAME));
if (!options.isWithTestcontainers()) {
Set<NamedTestResult> metrics = readMetrics(writeResult, readResult);
IOITMetrics.publishToInflux(TEST_ID, TIMESTAMP, metrics, settings);
}
}
@Test
public void testKafkaIOReadsAndWritesCorrectlyInBatch() throws IOException {
Map<Long, String> expectedHashes =
ImmutableMap.of(
1000L, "4507649971ee7c51abbb446e65a5c660",
100_000_000L, "0f12c27c9a7672e14775594be66cad9a");
expectedHashcode = getHashForRecordCount(sourceOptions.numRecords, expectedHashes);
writePipeline
.apply("Generate records", Read.from(new SyntheticBoundedSource(sourceOptions)))
.apply("Measure write time", ParDo.of(new TimeMonitor<>(NAMESPACE, WRITE_TIME_METRIC_NAME)))
.apply("Write to Kafka", writeToKafka());
PCollection<String> hashcode =
readPipeline
.apply("Read from bounded Kafka", readFromBoundedKafka())
.apply(
"Measure read time", ParDo.of(new TimeMonitor<>(NAMESPACE, READ_TIME_METRIC_NAME)))
.apply("Map records to strings", MapElements.via(new MapKafkaRecordsToStrings()))
.apply("Calculate hashcode", Combine.globally(new HashingFn()).withoutDefaults());
PAssert.thatSingleton(hashcode).isEqualTo(expectedHashcode);
PipelineResult writeResult = writePipeline.run();
writeResult.waitUntilFinish();
PipelineResult readResult = readPipeline.run();
PipelineResult.State readState =
readResult.waitUntilFinish(Duration.standardSeconds(options.getReadTimeout()));
cancelIfTimeouted(readResult, readState);
if (!options.isWithTestcontainers()) {
Set<NamedTestResult> metrics = readMetrics(writeResult, readResult);
IOITMetrics.publishToInflux(TEST_ID, TIMESTAMP, metrics, settings);
}
}
@Test
private long readElementMetric(PipelineResult result, String namespace, String name) {
MetricsReader metricsReader = new MetricsReader(result, namespace);
return metricsReader.getCounterMetric(name);
}
private Set<NamedTestResult> readMetrics(PipelineResult writeResult, PipelineResult readResult) {
BiFunction<MetricsReader, String, NamedTestResult> supplier =
(reader, metricName) -> {
long start = reader.getStartTimeMetric(metricName);
long end = reader.getEndTimeMetric(metricName);
return NamedTestResult.create(TEST_ID, TIMESTAMP, metricName, (end - start) / 1e3);
};
NamedTestResult writeTime =
supplier.apply(new MetricsReader(writeResult, NAMESPACE), WRITE_TIME_METRIC_NAME);
NamedTestResult readTime =
supplier.apply(new MetricsReader(readResult, NAMESPACE), READ_TIME_METRIC_NAME);
NamedTestResult runTime =
NamedTestResult.create(
TEST_ID, TIMESTAMP, RUN_TIME_METRIC_NAME, writeTime.getValue() + readTime.getValue());
return ImmutableSet.of(readTime, writeTime, runTime);
}
private void cancelIfTimeouted(PipelineResult readResult, PipelineResult.State readState)
throws IOException {
if (readState == null) {
readResult.cancel();
}
}
private KafkaIO.Write<byte[], byte[]> writeToKafka() {
return KafkaIO.<byte[], byte[]>write()
.withBootstrapServers(options.getKafkaBootstrapServerAddresses())
.withTopic(options.getKafkaTopic())
.withKeySerializer(ByteArraySerializer.class)
.withValueSerializer(ByteArraySerializer.class);
}
private KafkaIO.Read<byte[], byte[]> readFromBoundedKafka() {
return readFromKafka().withMaxNumRecords(sourceOptions.numRecords);
}
private KafkaIO.Read<byte[], byte[]> readFromKafka() {
return KafkaIO.readBytes()
.withBootstrapServers(options.getKafkaBootstrapServerAddresses())
.withConsumerConfigUpdates(ImmutableMap.of("auto.offset.reset", "earliest"))
.withTopic(options.getKafkaTopic());
}
private static class CountingFn extends DoFn<String, Void> {
private final Counter elementCounter;
CountingFn(String namespace, String name) {
elementCounter = Metrics.counter(namespace, name);
}
@ProcessElement
public void processElement() {
elementCounter.inc(1L);
}
}
/** Pipeline options specific for this test. */
public interface Options extends IOTestPipelineOptions, StreamingOptions {
@Description("Options for synthetic source.")
@Validation.Required
String getSourceOptions();
void setSourceOptions(String sourceOptions);
@Description("Kafka bootstrap server addresses")
@Default.String("localhost:9092")
String getKafkaBootstrapServerAddresses();
void setKafkaBootstrapServerAddresses(String address);
@Description("Kafka topic")
@Validation.Required
String getKafkaTopic();
void setKafkaTopic(String topic);
@Description("Time to wait for the events to be processed by the read pipeline (in seconds)")
@Validation.Required
Integer getReadTimeout();
void setReadTimeout(Integer readTimeout);
@Description("Whether to use testcontainers")
@Default.Boolean(false)
Boolean isWithTestcontainers();
void setWithTestcontainers(Boolean withTestcontainers);
@Description("Kafka container version in format 'X.Y.Z'. Use when useTestcontainers is true")
@Nullable
String getKafkaContainerVersion();
void setKafkaContainerVersion(String kafkaContainerVersion);
}
private static class MapKafkaRecordsToStrings
extends SimpleFunction<KafkaRecord<byte[], byte[]>, String> {
@Override
public String apply(KafkaRecord<byte[], byte[]> input) {
String key = Arrays.toString(input.getKV().getKey());
String value = Arrays.toString(input.getKV().getValue());
return String.format("%s %s", key, value);
}
}
public static String getHashForRecordCount(long recordCount, Map<Long, String> hashes) {
String hash = hashes.get(recordCount);
if (hash == null) {
throw new UnsupportedOperationException(
String.format("No hash for that record count: %s", recordCount));
}
return hash;
}
private static void setupKafkaContainer() {
kafkaContainer =
new KafkaContainer(
DockerImageName.parse("confluentinc/cp-kafka")
.withTag(options.getKafkaContainerVersion()));
kafkaContainer.start();
options.setKafkaBootstrapServerAddresses(kafkaContainer.getBootstrapServers());
}
} |
No side effect, I think. | public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) {
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.tenant-id");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-id");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-secret");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.certificates-refresh-interval");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate");
putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.well-known");
putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.custom");
MutablePropertySources propertySources = environment.getPropertySources();
if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) {
Properties properties = new Properties();
properties.put("server.ssl.key-store", "classpath:keyvault.dummy");
if (hasEmbedTomcat()) {
properties.put("server.ssl.key-store-type", "DKS");
}
propertySources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", properties));
}
if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) {
Properties properties = new Properties();
properties.put("server.ssl.trust-store", "classpath:keyvault.dummy");
if (hasEmbedTomcat()) {
properties.put("server.ssl.trust-store-type", "DKS");
}
propertySources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", properties));
}
Security.insertProviderAt(new KeyVaultJcaProvider(), 1);
if (overrideTrustManagerFactory(environment)) {
Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1);
}
if (disableHostnameVerification(environment)) {
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true);
}
} | putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri"); | public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) {
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.tenant-id");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-id");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-secret");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.certificates-refresh-interval");
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate");
putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.well-known");
putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.custom");
MutablePropertySources propertySources = environment.getPropertySources();
if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) {
Properties properties = new Properties();
properties.put("server.ssl.key-store", "classpath:keyvault.dummy");
if (hasEmbedTomcat()) {
properties.put("server.ssl.key-store-type", "DKS");
}
propertySources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", properties));
}
if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) {
Properties properties = new Properties();
properties.put("server.ssl.trust-store", "classpath:keyvault.dummy");
if (hasEmbedTomcat()) {
properties.put("server.ssl.trust-store-type", "DKS");
}
propertySources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", properties));
}
Security.insertProviderAt(new KeyVaultJcaProvider(), 1);
if (overrideTrustManagerFactory(environment)) {
Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1);
}
if (disableHostnameVerification(environment)) {
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true);
}
} | class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor {
@Override
/**
* The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca.
*
* "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring.
* Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca.
*/
private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) {
Optional.of(key)
.map(environment::getProperty)
.filter(StringUtils::hasText)
.ifPresent(value -> System.getProperties().put(key, value));
}
private boolean hasEmbedTomcat() {
try {
Class.forName("org.apache.tomcat.InstanceManager");
return true;
} catch (ClassNotFoundException ex) {
return false;
}
}
static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) {
return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory")
|| environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory");
}
private static boolean disableHostnameVerification(ConfigurableEnvironment environment) {
return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification")
|| environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification");
}
private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) {
return Optional.of(key)
.map(environment::getProperty)
.map(Boolean::parseBoolean)
.orElse(false);
}
} | class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor {
@Override
/**
* The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca.
*
* "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring.
* Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca.
*/
private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) {
Optional.of(key)
.map(environment::getProperty)
.filter(StringUtils::hasText)
.ifPresent(value -> System.getProperties().put(key, value));
}
private boolean hasEmbedTomcat() {
try {
Class.forName("org.apache.tomcat.InstanceManager");
return true;
} catch (ClassNotFoundException ex) {
return false;
}
}
static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) {
return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory")
|| environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory");
}
private static boolean disableHostnameVerification(ConfigurableEnvironment environment) {
return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification")
|| environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification");
}
private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) {
return Optional.of(key)
.map(environment::getProperty)
.map(Boolean::parseBoolean)
.orElse(false);
}
} |
Could we avoid the lambda? | public Object getReference(Bean<?> bean, Type beanType, CreationalContext<?> ctx) {
Objects.requireNonNull(bean, () -> "Managed Bean [" + beanType + "] is null");
Objects.requireNonNull(ctx, "CreationalContext is null");
if (bean instanceof InjectableBean && ctx instanceof CreationalContextImpl) {
return ArcContainerImpl.instance().beanInstanceHandle((InjectableBean) bean, (CreationalContextImpl) ctx).get();
}
throw new IllegalArgumentException(
"Arguments must be instances of " + InjectableBean.class + " and " + CreationalContextImpl.class + ": \nbean: " + bean + "\nctx: " + ctx);
} | Objects.requireNonNull(bean, () -> "Managed Bean [" + beanType + "] is null"); | public Object getReference(Bean<?> bean, Type beanType, CreationalContext<?> ctx) {
if (bean == null) {
throw new NullPointerException("Managed Bean [" + beanType + "] is null");
}
Objects.requireNonNull(ctx, "CreationalContext is null");
if (bean instanceof InjectableBean && ctx instanceof CreationalContextImpl) {
return ArcContainerImpl.instance().beanInstanceHandle((InjectableBean) bean, (CreationalContextImpl) ctx).get();
}
throw new IllegalArgumentException(
"Arguments must be instances of " + InjectableBean.class + " and " + CreationalContextImpl.class + ": \nbean: " + bean + "\nctx: " + ctx);
} | class BeanManagerImpl implements BeanManager {
static final LazyValue<BeanManagerImpl> INSTANCE = new LazyValue<>(BeanManagerImpl::new);
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
@Override
public Object getInjectableReference(InjectionPoint ij, CreationalContext<?> ctx) {
throw new UnsupportedOperationException();
}
@Override
public <T> CreationalContext<T> createCreationalContext(Contextual<T> contextual) {
return new CreationalContextImpl<>();
}
@Override
public Set<Bean<?>> getBeans(Type beanType, Annotation... qualifiers) {
return ArcContainerImpl.instance().getBeans(Objects.requireNonNull(beanType), qualifiers);
}
@Override
public Set<Bean<?>> getBeans(String name) {
return ArcContainerImpl.instance().getBeans(Objects.requireNonNull(name));
}
@Override
public Bean<?> getPassivationCapableBean(String id) {
throw new UnsupportedOperationException();
}
@Override
public <X> Bean<? extends X> resolve(Set<Bean<? extends X>> beans) {
return ArcContainerImpl.resolve(beans);
}
@Override
public void validate(InjectionPoint injectionPoint) {
throw new UnsupportedOperationException();
}
@Override
public void fireEvent(Object event, Annotation... qualifiers) {
getEvent().select(qualifiers).fire(event);
}
@Override
public <T> Set<ObserverMethod<? super T>> resolveObserverMethods(T event, Annotation... qualifiers) {
throw new UnsupportedOperationException();
}
@Override
public List<Decorator<?>> resolveDecorators(Set<Type> types, Annotation... qualifiers) {
throw new UnsupportedOperationException();
}
@Override
public List<Interceptor<?>> resolveInterceptors(InterceptionType type, Annotation... interceptorBindings) {
return ArcContainerImpl.instance().resolveInterceptors(Objects.requireNonNull(type), interceptorBindings);
}
@Override
public boolean isScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isNormalScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isPassivatingScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isQualifier(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isInterceptorBinding(Class<? extends Annotation> annotationType) {
return annotationType.isAnnotationPresent(InterceptorBinding.class);
}
@Override
public boolean isStereotype(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public Set<Annotation> getInterceptorBindingDefinition(Class<? extends Annotation> bindingType) {
throw new UnsupportedOperationException();
}
@Override
public Set<Annotation> getStereotypeDefinition(Class<? extends Annotation> stereotype) {
throw new UnsupportedOperationException();
}
@Override
public boolean areQualifiersEquivalent(Annotation qualifier1, Annotation qualifier2) {
throw new UnsupportedOperationException();
}
@Override
public boolean areInterceptorBindingsEquivalent(Annotation interceptorBinding1, Annotation interceptorBinding2) {
throw new UnsupportedOperationException();
}
@Override
public int getQualifierHashCode(Annotation qualifier) {
throw new UnsupportedOperationException();
}
@Override
public int getInterceptorBindingHashCode(Annotation interceptorBinding) {
throw new UnsupportedOperationException();
}
@Override
public Context getContext(Class<? extends Annotation> scopeType) {
return Arc.container().getContext(scopeType);
}
@Override
public ELResolver getELResolver() {
throw new UnsupportedOperationException();
}
@Override
public ExpressionFactory wrapExpressionFactory(ExpressionFactory expressionFactory) {
throw new UnsupportedOperationException();
}
@Override
public <T> AnnotatedType<T> createAnnotatedType(Class<T> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> InjectionTarget<T> createInjectionTarget(AnnotatedType<T> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> InjectionTargetFactory<T> getInjectionTargetFactory(AnnotatedType<T> annotatedType) {
throw new UnsupportedOperationException();
}
@Override
public <X> ProducerFactory<X> getProducerFactory(AnnotatedField<? super X> field, Bean<X> declaringBean) {
throw new UnsupportedOperationException();
}
@Override
public <X> ProducerFactory<X> getProducerFactory(AnnotatedMethod<? super X> method, Bean<X> declaringBean) {
throw new UnsupportedOperationException();
}
@Override
public <T> BeanAttributes<T> createBeanAttributes(AnnotatedType<T> type) {
throw new UnsupportedOperationException();
}
@Override
public BeanAttributes<?> createBeanAttributes(AnnotatedMember<?> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> Bean<T> createBean(BeanAttributes<T> attributes, Class<T> beanClass, InjectionTargetFactory<T> injectionTargetFactory) {
throw new UnsupportedOperationException();
}
@Override
public <T, X> Bean<T> createBean(BeanAttributes<T> attributes, Class<X> beanClass, ProducerFactory<X> producerFactory) {
throw new UnsupportedOperationException();
}
@Override
public InjectionPoint createInjectionPoint(AnnotatedField<?> field) {
throw new UnsupportedOperationException();
}
@Override
public InjectionPoint createInjectionPoint(AnnotatedParameter<?> parameter) {
throw new UnsupportedOperationException();
}
@Override
public <T extends Extension> T getExtension(Class<T> extensionClass) {
throw new UnsupportedOperationException();
}
@Override
public <T> InterceptionFactory<T> createInterceptionFactory(CreationalContext<T> ctx, Class<T> clazz) {
throw new UnsupportedOperationException();
}
@Override
public Event<Object> getEvent() {
return new EventImpl<>(Object.class, new HashSet<>());
}
@Override
public Instance<Object> createInstance() {
return new InstanceImpl<>(Object.class, null, new CreationalContextImpl<>());
}
} | class BeanManagerImpl implements BeanManager {
static final LazyValue<BeanManagerImpl> INSTANCE = new LazyValue<>(BeanManagerImpl::new);
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
@Override
public Object getInjectableReference(InjectionPoint ij, CreationalContext<?> ctx) {
throw new UnsupportedOperationException();
}
@Override
public <T> CreationalContext<T> createCreationalContext(Contextual<T> contextual) {
return new CreationalContextImpl<>();
}
@Override
public Set<Bean<?>> getBeans(Type beanType, Annotation... qualifiers) {
return ArcContainerImpl.instance().getBeans(Objects.requireNonNull(beanType), qualifiers);
}
@Override
public Set<Bean<?>> getBeans(String name) {
return ArcContainerImpl.instance().getBeans(Objects.requireNonNull(name));
}
@Override
public Bean<?> getPassivationCapableBean(String id) {
throw new UnsupportedOperationException();
}
@Override
public <X> Bean<? extends X> resolve(Set<Bean<? extends X>> beans) {
return ArcContainerImpl.resolve(beans);
}
@Override
public void validate(InjectionPoint injectionPoint) {
throw new UnsupportedOperationException();
}
@Override
public void fireEvent(Object event, Annotation... qualifiers) {
getEvent().select(qualifiers).fire(event);
}
@Override
public <T> Set<ObserverMethod<? super T>> resolveObserverMethods(T event, Annotation... qualifiers) {
throw new UnsupportedOperationException();
}
@Override
public List<Decorator<?>> resolveDecorators(Set<Type> types, Annotation... qualifiers) {
throw new UnsupportedOperationException();
}
@Override
public List<Interceptor<?>> resolveInterceptors(InterceptionType type, Annotation... interceptorBindings) {
return ArcContainerImpl.instance().resolveInterceptors(Objects.requireNonNull(type), interceptorBindings);
}
@Override
public boolean isScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isNormalScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isPassivatingScope(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isQualifier(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public boolean isInterceptorBinding(Class<? extends Annotation> annotationType) {
return annotationType.isAnnotationPresent(InterceptorBinding.class);
}
@Override
public boolean isStereotype(Class<? extends Annotation> annotationType) {
throw new UnsupportedOperationException();
}
@Override
public Set<Annotation> getInterceptorBindingDefinition(Class<? extends Annotation> bindingType) {
throw new UnsupportedOperationException();
}
@Override
public Set<Annotation> getStereotypeDefinition(Class<? extends Annotation> stereotype) {
throw new UnsupportedOperationException();
}
@Override
public boolean areQualifiersEquivalent(Annotation qualifier1, Annotation qualifier2) {
throw new UnsupportedOperationException();
}
@Override
public boolean areInterceptorBindingsEquivalent(Annotation interceptorBinding1, Annotation interceptorBinding2) {
throw new UnsupportedOperationException();
}
@Override
public int getQualifierHashCode(Annotation qualifier) {
throw new UnsupportedOperationException();
}
@Override
public int getInterceptorBindingHashCode(Annotation interceptorBinding) {
throw new UnsupportedOperationException();
}
@Override
public Context getContext(Class<? extends Annotation> scopeType) {
return Arc.container().getContext(scopeType);
}
@Override
public ELResolver getELResolver() {
throw new UnsupportedOperationException();
}
@Override
public ExpressionFactory wrapExpressionFactory(ExpressionFactory expressionFactory) {
throw new UnsupportedOperationException();
}
@Override
public <T> AnnotatedType<T> createAnnotatedType(Class<T> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> InjectionTarget<T> createInjectionTarget(AnnotatedType<T> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> InjectionTargetFactory<T> getInjectionTargetFactory(AnnotatedType<T> annotatedType) {
throw new UnsupportedOperationException();
}
@Override
public <X> ProducerFactory<X> getProducerFactory(AnnotatedField<? super X> field, Bean<X> declaringBean) {
throw new UnsupportedOperationException();
}
@Override
public <X> ProducerFactory<X> getProducerFactory(AnnotatedMethod<? super X> method, Bean<X> declaringBean) {
throw new UnsupportedOperationException();
}
@Override
public <T> BeanAttributes<T> createBeanAttributes(AnnotatedType<T> type) {
throw new UnsupportedOperationException();
}
@Override
public BeanAttributes<?> createBeanAttributes(AnnotatedMember<?> type) {
throw new UnsupportedOperationException();
}
@Override
public <T> Bean<T> createBean(BeanAttributes<T> attributes, Class<T> beanClass, InjectionTargetFactory<T> injectionTargetFactory) {
throw new UnsupportedOperationException();
}
@Override
public <T, X> Bean<T> createBean(BeanAttributes<T> attributes, Class<X> beanClass, ProducerFactory<X> producerFactory) {
throw new UnsupportedOperationException();
}
@Override
public InjectionPoint createInjectionPoint(AnnotatedField<?> field) {
throw new UnsupportedOperationException();
}
@Override
public InjectionPoint createInjectionPoint(AnnotatedParameter<?> parameter) {
throw new UnsupportedOperationException();
}
@Override
public <T extends Extension> T getExtension(Class<T> extensionClass) {
throw new UnsupportedOperationException();
}
@Override
public <T> InterceptionFactory<T> createInterceptionFactory(CreationalContext<T> ctx, Class<T> clazz) {
throw new UnsupportedOperationException();
}
@Override
public Event<Object> getEvent() {
return new EventImpl<>(Object.class, new HashSet<>());
}
@Override
public Instance<Object> createInstance() {
return new InstanceImpl<>(Object.class, null, new CreationalContextImpl<>());
}
} |
Because of possible encoding issues, the `URI(scheme,host,path,fragment)` constructor should be used instead. Just give `null` for `host` and `fragment`. | private ProtectionDomain createDefaultProtectionDomain(Path applicationClasspath) {
URL url = null;
if (applicationClasspath != null) {
try {
URI uri = new URI("file:
url = uri.toURL();
} catch (URISyntaxException | MalformedURLException e) {
log.error("URL codeSource location for path " + applicationClasspath + " could not be created.");
}
}
CodeSource codesource = new CodeSource(url, (Certificate[]) null);
ProtectionDomain protectionDomain = new ProtectionDomain(codesource, null, this, null);
return protectionDomain;
} | URI uri = new URI("file: | private ProtectionDomain createDefaultProtectionDomain(Path applicationClasspath) {
URL url = null;
if (applicationClasspath != null) {
try {
URI uri = new URI("file", null, applicationClasspath.toString(), null);
url = uri.toURL();
} catch (URISyntaxException | MalformedURLException e) {
log.error("URL codeSource location for path " + applicationClasspath + " could not be created.");
}
}
CodeSource codesource = new CodeSource(url, (Certificate[]) null);
ProtectionDomain protectionDomain = new ProtectionDomain(codesource, null, this, null);
return protectionDomain;
} | class to cache", e);
}
}
return data;
}
private String sanitizeName(String name) {
if (name.startsWith("/")) {
return name.substring(1);
}
return name;
} | class to cache", e);
}
}
return data;
}
private String sanitizeName(String name) {
if (name.startsWith("/")) {
return name.substring(1);
}
return name;
} |
(Note that I might be missing something, I'm exhausted :)) | private static String buildConnectionString(RedisConfiguration config, URI host, String clientName) {
final String address = host.toString();
if (address.contains(":
return address;
}
LOGGER.warnf(
"The configuration property quarkus.redis%s.hosts is using the deprecated way of setting up the Redis connection. "
+ "Visit https:
isDefault(clientName) ? "" : "." + clientName);
boolean ssl = false;
if (config.ssl.isPresent()) {
ssl = config.ssl.get();
logDeprecationWarning(clientName, "ssl");
}
final StringBuilder builder = ssl ? new StringBuilder("rediss:
if (config.password.isPresent()) {
builder.append(config.password.get());
builder.append('@');
logDeprecationWarning(clientName, "password");
}
builder.append(host.getHost());
builder.append(':');
builder.append(host.getPort());
builder.append('/');
if (config.database.isPresent()) {
builder.append(config.database.getAsInt());
logDeprecationWarning(clientName, "database");
}
return builder.toString();
} | return address; | private static String buildConnectionString(RedisConfiguration config, URI host, String clientName) {
final String address = host.toString();
if (address.contains(":
return address;
}
LOGGER.warnf(
"The configuration property quarkus.redis%s.hosts is using the deprecated way of setting up the Redis connection. "
+ "Visit https:
isDefault(clientName) ? "" : "." + clientName);
boolean ssl = false;
if (config.ssl.isPresent()) {
ssl = config.ssl.get();
logDeprecationWarning(clientName, "ssl");
}
final StringBuilder builder = ssl ? new StringBuilder("rediss:
if (config.password.isPresent()) {
builder.append(config.password.get());
builder.append('@');
logDeprecationWarning(clientName, "password");
}
builder.append(host.getHost());
builder.append(':');
builder.append(host.getPort());
builder.append('/');
if (config.database.isPresent()) {
builder.append(config.database.getAsInt());
logDeprecationWarning(clientName, "database");
}
return builder.toString();
} | class RedisClientUtil {
public static final String DEFAULT_CLIENT = "<default>";
private static final Logger LOGGER = Logger.getLogger(RedisClientUtil.class);
public static RedisOptions buildOptions(RedisConfiguration redisConfig, String clientName) {
RedisOptions options = new RedisOptions();
options.setType(redisConfig.clientType);
if (RedisClientType.STANDALONE == redisConfig.clientType) {
if (redisConfig.hosts.isPresent() && redisConfig.hosts.get().size() > 1) {
throw new ConfigurationException("Multiple hosts supplied for non clustered configuration");
}
}
if (redisConfig.hosts.isPresent()) {
Set<URI> hosts = redisConfig.hosts.get();
for (URI host : hosts) {
options.addConnectionString(buildConnectionString(redisConfig, host, clientName));
}
}
options.setMaxNestedArrays(redisConfig.maxNestedArrays);
options.setMaxWaitingHandlers(redisConfig.maxWaitingHandlers);
options.setMaxPoolSize(redisConfig.maxPoolSize);
options.setMaxPoolWaiting(redisConfig.maxPoolWaiting);
options.setPoolRecycleTimeout(Math.toIntExact(redisConfig.poolRecycleTimeout.toMillis()));
if (redisConfig.poolCleanerInterval.isPresent()) {
options.setPoolCleanerInterval(Math.toIntExact(redisConfig.poolCleanerInterval.get().toMillis()));
}
if (redisConfig.role.isPresent()) {
options.setRole(redisConfig.role.get());
}
if (redisConfig.masterName.isPresent()) {
options.setMasterName(redisConfig.masterName.get());
}
if (redisConfig.slaves.isPresent()) {
options.setUseSlave(redisConfig.slaves.get());
}
return options;
}
public static boolean isDefault(String clientName) {
return DEFAULT_CLIENT.equals(clientName);
}
public static RedisConfiguration getConfiguration(RedisConfig config, String name) {
return isDefault(name) ? config.defaultClient : config.additionalRedisClients.get(name);
}
/**
* This method was only added to support minimal backward compatibility.
* <p>
* It should be removed in the 1.10 release.
* Follows up https:
*/
/**
* This method was only added to support minimal backward compatibility.
* <p>
* It should be removed in the 1.10 release.
* Follows up https:
*/
private static void logDeprecationWarning(String clientName, String propertyName) {
LOGGER.warnf("The configuration property quarkus.redis%s.%s is deprecated. It will be removed in the future release. "
+ "Visit https:
isDefault(clientName) ? "" : "." + clientName, propertyName);
}
} | class RedisClientUtil {
public static final String DEFAULT_CLIENT = "<default>";
private static final Logger LOGGER = Logger.getLogger(RedisClientUtil.class);
public static RedisOptions buildOptions(RedisConfiguration redisConfig, String clientName) {
RedisOptions options = new RedisOptions();
options.setType(redisConfig.clientType);
if (RedisClientType.STANDALONE == redisConfig.clientType) {
if (redisConfig.hosts.isPresent() && redisConfig.hosts.get().size() > 1) {
throw new ConfigurationException("Multiple hosts supplied for non clustered configuration");
}
}
if (redisConfig.hosts.isPresent()) {
Set<URI> hosts = redisConfig.hosts.get();
for (URI host : hosts) {
options.addConnectionString(buildConnectionString(redisConfig, host, clientName));
}
}
options.setMaxNestedArrays(redisConfig.maxNestedArrays);
options.setMaxWaitingHandlers(redisConfig.maxWaitingHandlers);
options.setMaxPoolSize(redisConfig.maxPoolSize);
options.setMaxPoolWaiting(redisConfig.maxPoolWaiting);
options.setPoolRecycleTimeout(Math.toIntExact(redisConfig.poolRecycleTimeout.toMillis()));
if (redisConfig.poolCleanerInterval.isPresent()) {
options.setPoolCleanerInterval(Math.toIntExact(redisConfig.poolCleanerInterval.get().toMillis()));
}
if (redisConfig.role.isPresent()) {
options.setRole(redisConfig.role.get());
}
if (redisConfig.masterName.isPresent()) {
options.setMasterName(redisConfig.masterName.get());
}
if (redisConfig.slaves.isPresent()) {
options.setUseSlave(redisConfig.slaves.get());
}
return options;
}
public static boolean isDefault(String clientName) {
return DEFAULT_CLIENT.equals(clientName);
}
public static RedisConfiguration getConfiguration(RedisConfig config, String name) {
return isDefault(name) ? config.defaultClient : config.additionalRedisClients.get(name);
}
/**
* @deprecated It should be removed in the 1.10 release.
* This method was only added to support minimal backward compatibility.
* <p>
* Follows up https:
*/
/**
* @deprecated It should be removed in the 1.10 release.
* This method was only added to support minimal backward compatibility.
* <p>
* Follows up https:
*/
private static void logDeprecationWarning(String clientName, String propertyName) {
LOGGER.warnf("The configuration property quarkus.redis%s.%s is deprecated. It will be removed in the future release. "
+ "Visit https:
isDefault(clientName) ? "" : "." + clientName, propertyName);
}
} |