package cz.trask.migration.impl.v32;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import cz.trask.migration.AbstractProcess;
import cz.trask.migration.model.APIInfo;
import cz.trask.migration.model.APIList;
import cz.trask.migration.model.FileType;
import cz.trask.migration.model.HttpResponse;
import cz.trask.migration.model.TokenResponse;
import cz.trask.migration.model.ZipEntryData;
import cz.trask.migration.model.v32.Subscriptions;
import cz.trask.migration.model.v32.Subscriptions.ApplicationInfo;
import cz.trask.migration.model.v32.Subscriptions.Subscription;
import io.apicurio.registry.rest.client.exception.VersionAlreadyExistsException;
import io.apicurio.registry.rest.v2.beans.ArtifactMetaData;
import io.apicurio.registry.rest.v2.beans.ArtifactReference;
import io.apicurio.registry.rest.v2.beans.VersionSearchResults;
import io.apicurio.registry.types.RuleType;
import lombok.extern.log4j.Log4j2;
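/**
 * Migrates APIs from WSO2 API Manager 3.2 into Apicurio Registry: every PUBLISHED or
 * DEPRECATED API is exported from WSO2 as a ZIP, its OpenAPI definition is enriched with
 * gateway URLs and a description linking back to WSO2, and the result is stored as a
 * versioned Apicurio artifact together with its subscriptions.
 */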
@Log4j2
public class Wso2v32ToApicurio extends AbstractProcess {
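    // 1-based counter shared by the worker threads, used only for "x of y" progress logging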
private final AtomicInteger apiCounter = new AtomicInteger(1);
    /**
     * Main entry point for the migration: authenticates against WSO2, lists the
     * matching APIs and imports them into Apicurio in parallel.
     *
     * @throws RuntimeException if authentication fails, no matching APIs are found
     *                          or the import cannot be completed
     */
public void process() {
try {
log.info("Starting API import to Apicurio from WSO2...");
TokenResponse token = authenticateToWso2AndGetToken(config.getSource());
APIList apis = getList(config.getSource().getPublisherApiUrl(), token);
if (apis == null || apis.getList() == null || apis.getList().length == 0) {
throw new IllegalStateException(
"No APIs to export that match your criteria! Check the name of the API you want to export.");
}
log.info("Found {} APIs", apis.getCount());
int maxThreads = config.getMaxThreads();
ExecutorService executor = Executors.newFixedThreadPool(maxThreads);
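            // Submit one import task per API; failures are caught and logged inside processApi()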
for (APIInfo api : apis.getList()) {
final int index = apiCounter.getAndIncrement();
executor.submit(() -> processApi(api, token, index, apis.getCount()));
}
executor.shutdown();
if (!executor.awaitTermination(10, TimeUnit.MINUTES)) {
log.warn("Timeout waiting for API import tasks to finish");
}
log.info("Finished processing APIs.");
} catch (Exception e) {
log.error("Error while exporting APIs.", e);
throw new RuntimeException("Export failed", e);
}
}
    /**
     * Processes a single API: fetches its details and subscriptions from WSO2, exports
     * it as a ZIP and creates or updates the corresponding artifact in Apicurio.
     * The {@code index}/{@code total} pair is used only for progress logging.
     */
private void processApi(APIInfo api, TokenResponse tokenResponse, int index, int total) {
long start = System.currentTimeMillis();
        String status = api.getLifeCycleStatus();
        if (status == null || (!status.contains("PUBLISHED") && !status.contains("DEPRECATED"))) {
            log.info("Skipping API {} of {}: lifecycle status '{}' is neither PUBLISHED nor DEPRECATED (ID={})",
                    index, total, status, api.getId());
            return;
        }
try {
log.info("Processing API {} of {}", index, total);
Map<String, String> httpHeaders = Collections.singletonMap("Authorization",
"Bearer " + tokenResponse.getAccess_token());
            // 1) Retrieve the API detail from the DevPortal and its subscriptions from the Publisher
HttpResponse apiInfoResp = makeRequest("GET",
config.getSource().getDevPortalApiUrl() + "/apis/" + api.getId(), httpHeaders,
Collections.emptyMap());
HttpResponse subsResp = makeRequest("GET",
config.getSource().getPublisherApiUrl() + "/subscriptions?apiId=" + api.getId(), httpHeaders,
Collections.emptyMap());
// 2) Export the API as a zip
HttpResponse exportedZip = makeRequest("GET",
config.getSource().getPublisherApiUrl() + "/apis/export?apiId=" + api.getId(), httpHeaders,
Collections.emptyMap(), true);
            if (config.isStoreMigratedArtifacts()) {
                File tmpFile = new File("tmp/api/", api.getName() + "_" + api.getVersion() + ".zip");
                log.info(" - Storing migrated API file: {}", tmpFile.getAbsolutePath());
                // Make sure the target directory exists and close the stream even if the write fails
                tmpFile.getParentFile().mkdirs();
                try (FileOutputStream fos = new FileOutputStream(tmpFile)) {
                    fos.write(exportedZip.getResponseBytes());
                }
            }
List<ZipEntryData> zipEntries = ZipUtils.extractFilesFromZip(exportedZip.getResponseBytes());
            String swagger = null;
            for (ZipEntryData e : zipEntries) {
                if (e.getType().toString().equals(FileType.OPENAPI.toString())) {
                    log.debug("Found main API definition file: {}", e.getName());
                    swagger = new String(e.getContent());
                    break;
                }
            }
            if (swagger == null) {
                throw new IllegalStateException(
                        "Exported ZIP for API " + api.getId() + " does not contain an OpenAPI definition");
            }
// 3) Deserialize JSON responses
Map<String, Object> apiMap = mapper.readValue(apiInfoResp.getResponse(), Map.class);
Subscriptions subs = mapper.readValue(subsResp.getResponse(), Subscriptions.class);
@SuppressWarnings("unchecked")
List<String> tagsList = (List<String>) apiMap.get("tags");
// 4) Build the properties map
Map<String, String> props = new LinkedHashMap<>();
props.put("version", api.getVersion());
props.put("status", status);
props.put(PARAM_SOURCE_APIM, VERSION_32);
addSubscriptionsToProps(props, subs);
addEndpointsToProps(props, apiMap);
addTagsToProps(props, tagsList);
// 5) Build the description that contains the publisher & devportal URLs
String baseDesc = api.getDescription() != null ? api.getDescription() : "";
String pubUrl = config.getPatterns().getPublisherUrlPattern().replace("{API_ID}", api.getId());
String devPortUrl = config.getPatterns().getDevPortalUrlPattern().replace("{API_ID}", api.getId());
String fullDesc = baseDesc + " ***** PUBLISHER URL ***** " + pubUrl + " ***** DEVPORTAL URL ***** "
+ devPortUrl;
// 6) Update the swagger with the description and servers
Map<String, Object> swaggerMap = mapperYaml.readValue(swagger, Map.class);
ObjectNode swaggerObj = mapperYaml.valueToTree(swaggerMap);
updateSwagger(swaggerObj, apiMap, fullDesc);
            // Refresh name and context with the values returned by the DevPortal detail call
            api.setName((String) apiMap.get("name"));
            api.setContext((String) apiMap.get("context"));
// 7) Prepare artifact creation/update
String group = config.getApicurio().getDefaultApiGroup();
String mainArtifactId = api.getName() + api.getContext();
VersionSearchResults existingArtifacts;
try {
existingArtifacts = client.listArtifactVersions(group, mainArtifactId, 0, Integer.MAX_VALUE);
} catch (Exception e) {
log.debug("No API {} exists will create it", api.getContext());
existingArtifacts = null;
}
if (existingArtifacts == null) {
// Create new artifact
List<ArtifactReference> references = createReferencesFromZip(zipEntries, api);
addSubscriptionsToReferences(references, subs, api);
ArtifactMetaData meta = client.createArtifact(group, mainArtifactId, api.getVersion(), null, null, null,
api.getName(), fullDesc, null, null, null,
new ByteArrayInputStream(swaggerObj.toString().getBytes()), references);
                setArtifactMetaData(meta, api.getName(), fullDesc, props);
// Create the three required rules
createRule(meta, "NONE", RuleType.COMPATIBILITY);
createRule(meta, "NONE", RuleType.VALIDITY);
createRule(meta, "NONE", RuleType.INTEGRITY);
} else {
                // Artifact exists; check whether this version is already present
boolean versionExists = false;
try {
client.getArtifactVersionMetaData(group, mainArtifactId, api.getVersion());
versionExists = true;
} catch (Exception e) {
                    // Version not found; it will be created below
}
List<ArtifactReference> references = createReferencesFromZip(zipEntries, api);
addSubscriptionsToReferences(references, subs, api);
if (!versionExists) {
ArtifactMetaData meta = client.updateArtifact(group, mainArtifactId, api.getVersion(),
api.getName(), fullDesc, new ByteArrayInputStream(swaggerObj.toString().getBytes()),
references);
setArtifactMetaData(meta, api.getName(), fullDesc, props);
} else {
                    // Version already exists; nothing to do
log.warn("API {} with version {} already exists. Skipping import.", api.getContext(),
api.getVersion());
}
}
log.info("Successfully imported API '{}' ({}). Took {} ms", api.getName(), api.getVersion(),
System.currentTimeMillis() - start);
} catch (IOException e) {
log.error("IO error while importing API {}: {}", api.getId(), e.getMessage(), e);
} catch (VersionAlreadyExistsException e) {
log.warn("API version already exists for {}: {}. Skipping.", api.getName(), api.getVersion());
} catch (Exception e) {
log.error("Cannot export API '{}':{}", api.getName(), api.getVersion(), e);
}
}
/* --------------------------------------------------------------------- */
/* Helper methods */
/* --------------------------------------------------------------------- */
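    /**
     * Overwrites {@code info.description} in the OpenAPI document and rebuilds the
     * {@code servers} array from the WSO2 gateway endpoint URLs (https/wss only).
     */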
    @SuppressWarnings("unchecked")
    private void updateSwagger(ObjectNode swagger, Map<String, Object> apiMap, String description) {
// Update "info.description"
ObjectNode info = (ObjectNode) swagger.get("info");
if (info != null) {
info.put("description", description);
}
// Build "servers" array
ArrayNode servers = mapper.createArrayNode();
List<Map<String, Object>> endpoints = (List<Map<String, Object>>) apiMap.get("endpointURLs");
if (endpoints != null) {
for (Map<String, Object> env : endpoints) {
Map<String, String> urls = (Map<String, String>) env.get("URLs");
                if (urls == null || urls.isEmpty()) {
                    continue;
                }
                ObjectNode server = mapper.createObjectNode();
                // Keep only the secure gateway endpoints (https/wss) as server URLs
                urls.forEach((k, v) -> {
                    if (v != null && !v.isBlank() && (k.equals("https") || k.equals("wss"))) {
                        server.put("url", v);
                    }
                });
                // An OpenAPI server object requires a "url"; skip gateways that expose no secure endpoint
                if (!server.has("url")) {
                    continue;
                }
                server.put("description", "Gateway: " + env.getOrDefault("environmentName", ""));
                servers.add(server);
            }
}
// Replace "servers" node
swagger.set("servers", servers);
}
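    /**
     * Adds one {@code subscriptionN} property per WSO2 subscription, listing the
     * subscribing application and its owner.
     */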
private void addSubscriptionsToProps(Map<String, String> props, Subscriptions subs) {
if (subs == null || subs.getList() == null || subs.getList().isEmpty())
return;
int i = 1;
for (Subscription sub : subs.getList()) {
ApplicationInfo appInfo = sub.getApplicationInfo();
if (appInfo == null)
continue;
props.put("subscription" + i, appInfo.getName() + " (Owner: " + appInfo.getSubscriber() + ")");
i++;
}
}
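    /**
     * Stores the subscription list as a separate YAML artifact in Apicurio and links it
     * to the main API artifact via an {@link ArtifactReference}.
     */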
private void addSubscriptionsToReferences(List<ArtifactReference> references, Subscriptions subs, APIInfo api)
throws Exception {
if (subs == null || subs.getList() == null || subs.getList().isEmpty())
return;
String artifactId = api.getName() + "/" + api.getVersion() + "/" + ARTIFACT_NAME_SUBSCRIPTIONS;
byte[] subsBytes = mapperYaml.writeValueAsBytes(subs);
try (ByteArrayInputStream is = new ByteArrayInputStream(subsBytes)) {
ArtifactMetaData meta = client.createArtifactWithVersion(ARTIFACT_GROUP_SUBSCRIPTIONS, artifactId,
api.getVersion(), is);
Map<String, String> props = new LinkedHashMap<>();
props.put(PARAM_SOURCE_APIM, VERSION_32);
setArtifactMetaData(meta, artifactId, null, props);
}
ArtifactReference ref = new ArtifactReference();
ref.setName(ARTIFACT_NAME_SUBSCRIPTIONS);
ref.setGroupId(ARTIFACT_GROUP_SUBSCRIPTIONS);
ref.setArtifactId(artifactId);
ref.setVersion(api.getVersion());
references.add(ref);
}
}