diff --git a/src/main/java/cz/trask/migration/ApiSync.java b/src/main/java/cz/trask/migration/ApiSync.java
index c8f179f..f79038f 100644
--- a/src/main/java/cz/trask/migration/ApiSync.java
+++ b/src/main/java/cz/trask/migration/ApiSync.java
@@ -3,7 +3,7 @@ package cz.trask.migration;

 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import cz.trask.migration.impl.v32.ImportToApicurio;
+import cz.trask.migration.impl.v32.Wso2v32ToApicurio;
 import cz.trask.migration.impl.v45.ExportToWso2FromV32;
 import cz.trask.migration.model.StartParameters;

@@ -23,7 +23,7 @@ public class ApiSync {

         if (sp.getCommand().equalsIgnoreCase("wso2ToApicurio")) {
             log.info("Import command selected.");
-            ImportToApicurio imp = new ImportToApicurio();
+            Wso2v32ToApicurio imp = new Wso2v32ToApicurio();
             imp.process();
         } else if (sp.getCommand().equalsIgnoreCase("apicurioToWso2")) {
             log.info("Export command selected.");
diff --git a/src/main/java/cz/trask/migration/impl/v32/ImportToApicurio.java b/src/main/java/cz/trask/migration/impl/v32/Wso2v32ToApicurio.java
similarity index 96%
rename from src/main/java/cz/trask/migration/impl/v32/ImportToApicurio.java
rename to src/main/java/cz/trask/migration/impl/v32/Wso2v32ToApicurio.java
index 3547ce8..8d3baec 100644
--- a/src/main/java/cz/trask/migration/impl/v32/ImportToApicurio.java
+++ b/src/main/java/cz/trask/migration/impl/v32/Wso2v32ToApicurio.java
@@ -1,368 +1,368 @@
-package cz.trask.migration.impl.v32;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-
-import cz.trask.migration.AbstractProcess;
-import cz.trask.migration.model.APIInfo;
-import cz.trask.migration.model.APIList;
-import cz.trask.migration.model.FileType;
-import cz.trask.migration.model.HttpResponse;
-import cz.trask.migration.model.TokenResponse;
-import cz.trask.migration.model.ZipEntryData;
-import cz.trask.migration.model.v32.Subscriptions;
-import cz.trask.migration.model.v32.Subscriptions.ApplicationInfo;
-import cz.trask.migration.model.v32.Subscriptions.Subscription;
-import io.apicurio.registry.rest.client.RegistryClient;
-import io.apicurio.registry.rest.client.RegistryClientFactory;
-import io.apicurio.registry.rest.client.exception.VersionAlreadyExistsException;
-import io.apicurio.registry.rest.v2.beans.ArtifactMetaData;
-import io.apicurio.registry.rest.v2.beans.ArtifactReference;
-import io.apicurio.registry.rest.v2.beans.EditableMetaData;
-import io.apicurio.registry.rest.v2.beans.Rule;
-import io.apicurio.registry.rest.v2.beans.VersionSearchResults;
-import io.apicurio.registry.types.RuleType;
-
-public class ImportToApicurio extends AbstractProcess {
-
-    private static final Logger log = LogManager.getLogger(ImportToApicurio.class);
-
-    private final AtomicInteger apiCounter = new AtomicInteger(1);
-
-    private final RegistryClient client;
-
-    public ImportToApicurio() throws Exception {
-        this.client = RegistryClientFactory.create(config.getApicurio().getApiUrl());
-    }
-
-    /**
-     * Main entry point for the import process.
-     *
-     * @throws RuntimeException if any error occurs
-     */
-    public void process() {
-        try {
-            log.info("Starting API import to Apicurio from WSO2...");
-
-            TokenResponse token = authenticateToWso2AndGetToken();
-
-            APIList apis = getList(config.getSource().getPublisherApiUrl(), token);
-            if (apis == null || apis.getList() == null || apis.getList().length == 0) {
-                throw new IllegalStateException(
-                        "No APIs to export that match your criteria! Check the name of the API you want to export.");
-            }
-
-            log.info("Found {} APIs", apis.getCount());
-
-            int maxThreads = config.getMaxThreads();
-            ExecutorService executor = Executors.newFixedThreadPool(maxThreads);
-
-            for (APIInfo api : apis.getList()) {
-                final int index = apiCounter.getAndIncrement();
-                executor.submit(() -> processApi(api, token, index, apis.getCount()));
-            }
-
-            executor.shutdown();
-            if (!executor.awaitTermination(10, TimeUnit.MINUTES)) {
-                log.warn("Timeout waiting for API import tasks to finish");
-            }
-            log.info("Finished processing APIs.");
-        } catch (Exception e) {
-            log.error("Error while exporting APIs.", e);
-            throw new RuntimeException("Export failed", e);
-        }
-    }
-
-    /**
-     * Process a single API – fetches the data, creates or updates the corresponding
-     * artifact in Apicurio.
-     */
-    private void processApi(APIInfo api, TokenResponse tokenResponse, int index, int total) {
-        long start = System.currentTimeMillis();
-        String status = api.getLifeCycleStatus();
-
-        if (!status.contains("PUBLISHED") && !status.contains("DEPRECATED")) {
-            log.info("Skipping API {} of {} – not published (ID={})", index, total, api.getId());
-            return;
-        }
-
-        try {
-            log.info("Processing API {} of {}", index, total);
-
-            Map<String, String> httpHeaders = Collections.singletonMap("Authorization",
-                    "Bearer " + tokenResponse.getAccess_token());
-
-            // 1) Retrieve basic information
-            HttpResponse apiInfoResp = makeRequest("GET",
-                    config.getSource().getDevPortalApiUrl() + "/apis/" + api.getId(), httpHeaders,
-                    Collections.emptyMap());
-
-            HttpResponse subsResp = makeRequest("GET",
-                    config.getSource().getPublisherApiUrl() + "/subscriptions?apiId=" + api.getId(), httpHeaders,
-                    Collections.emptyMap());
-
-            // 2) Export the API as a zip
-            HttpResponse exportedZip = makeRequest("GET",
-                    config.getSource().getPublisherApiUrl() + "/apis/export?apiId=" + api.getId(), httpHeaders,
-                    Collections.emptyMap(), true);
-
-            List<ZipEntryData> zipEntries = ZipUtils.extractFilesFromZip(exportedZip.getResponseBytes());
-
-            String swagger = null;
-
-            for (ZipEntryData e : zipEntries) {
-                if (e.getType().toString().equals(FileType.OPENAPI.toString())) {
-                    log.debug("Found main API definition file: {}", e.getName());
-                    swagger = new String(e.getContent());
-                    break;
-                }
-            }
-
-            // 3) Deserialize JSON responses
-            Map<String, Object> apiMap = mapper.readValue(apiInfoResp.getResponse(), Map.class);
-            Subscriptions subs = mapper.readValue(subsResp.getResponse(), Subscriptions.class);
-
-            @SuppressWarnings("unchecked")
-            List<String> tagsList = (List<String>) apiMap.get("tags");
-
-            // 4) Build the properties map
-            Map<String, String> props = new LinkedHashMap<>();
-            props.put("version", api.getVersion());
-            props.put("status", status);
-            props.put(PARAM_SOURCE_APIM, VERSION_32);
-            addSubscriptionsToProps(props, subs);
-            addEndpointsToProps(props, apiMap);
-            addTagsToProps(props, tagsList);
-
-            // 5) Build the description that contains the publisher & devportal URLs
-            String baseDesc = api.getDescription() != null ? api.getDescription() : "";
-            String pubUrl = config.getPatterns().getPublisherUrlPattern().replace("{API_ID}", api.getId());
-            String devPortUrl = config.getPatterns().getDevPortalUrlPattern().replace("{API_ID}", api.getId());
-
-            String fullDesc = baseDesc + " ***** PUBLISHER URL ***** " + pubUrl + " ***** DEVPORTAL URL ***** "
-                    + devPortUrl;
-
-            // 6) Update the swagger with the description and servers
-            Map<String, Object> swaggerMap = mapperYaml.readValue(swagger, Map.class);
-            ObjectNode swaggerObj = mapperYaml.valueToTree(swaggerMap);
-            updateSwagger(swaggerObj, apiMap, fullDesc);
-
-            // 7) Prepare artifact creation/update
-            String group = config.getApicurio().getDefaultApiGroup();
-            String mainArtifactId = api.getName() + api.getContext();
-
-            VersionSearchResults existingArtifacts;
-            try {
-                existingArtifacts = client.listArtifactVersions(group, mainArtifactId, 0, Integer.MAX_VALUE);
-            } catch (Exception e) {
-                log.debug("No API {} exists – will create it", api.getContext());
-                existingArtifacts = null;
-            }
-
-            if (existingArtifacts == null) {
-                // Create new artifact
-                List<ArtifactReference> references = createReferencesFromZip(zipEntries, group, api);
-                addSubscriptionsToReferences(references, subs, api);
-
-                ArtifactMetaData meta = client.createArtifact(group, mainArtifactId, api.getVersion(), null, null, null,
-                        api.getName(), fullDesc, null, null, null,
-                        new ByteArrayInputStream(swaggerObj.toString().getBytes()), references);
-
-                setArtifactMetaData(meta, props);
-                // Create the three required rules
-                createRule(meta, "NONE", RuleType.COMPATIBILITY);
-                createRule(meta, "NONE", RuleType.VALIDITY);
-                createRule(meta, "NONE", RuleType.INTEGRITY);
-
-            } else {
-                // Artifact exists – check if the version exists
-                boolean versionExists = false;
-                try {
-                    client.getArtifactVersionMetaData(group, mainArtifactId, api.getVersion());
-                    versionExists = true;
-                } catch (Exception e) {
-                    // Version missing – will create it below
-                }
-
-                List<ArtifactReference> references = createReferencesFromZip(zipEntries, group, api);
-                addSubscriptionsToReferences(references, subs, api);
-
-                if (!versionExists) {
-                    ArtifactMetaData meta = client.updateArtifact(group, mainArtifactId, api.getVersion(),
-                            api.getName(), fullDesc, new ByteArrayInputStream(swaggerObj.toString().getBytes()),
-                            references);
-                    setArtifactMetaData(meta, props);
-                } else {
-                    // Version already exists – no action needed
-                    log.warn("API {} with version {} already exists. Skipping import.", api.getContext(),
-                            api.getVersion());
-                }
-            }
-
-            log.info("Successfully imported API '{}' ({}). Took {} ms", api.getName(), api.getVersion(),
-                    System.currentTimeMillis() - start);
-        } catch (IOException e) {
-            log.error("IO error while importing API {}: {}", api.getId(), e.getMessage(), e);
-        } catch (VersionAlreadyExistsException e) {
-            log.warn("API version already exists for {}: {}. Skipping.", api.getName(), api.getVersion());
-        } catch (Exception e) {
-            log.error("Cannot export API '{}':{}", api.getName(), api.getVersion(), e);
-        }
-    }
-
-    /* --------------------------------------------------------------------- */
-    /* Helper methods                                                         */
-    /* --------------------------------------------------------------------- */
-
-    private void updateSwagger(ObjectNode swagger, Map<String, Object> apiMap, String description) {
-        // Update "info.description"
-        ObjectNode info = (ObjectNode) swagger.get("info");
-        if (info != null) {
-            info.put("description", description);
-        }
-
-        // Build "servers" array
-        ArrayNode servers = mapper.createArrayNode();
-
-        List<Map<String, Object>> endpoints = (List<Map<String, Object>>) apiMap.get("endpointURLs");
-        if (endpoints != null) {
-            for (Map<String, Object> env : endpoints) {
-                Map<String, String> urls = (Map<String, String>) env.get("URLs");
-                if (urls == null || urls.isEmpty())
-                    continue;
-
-                ObjectNode server = mapper.createObjectNode();
-                urls.forEach((k, v) -> {
-                    if (v != null && !v.isBlank()) {
-                        if (k.equals("https") || k.equals("wss")) {
-                            server.put("url", v);
-                        }
-                    }
-                });
-                server.put("description", "Gateway: " + env.getOrDefault("environmentName", ""));
-                servers.add(server);
-            }
-        }
-
-        // Replace "servers" node
-        swagger.set("servers", servers);
-    }
-
-    private void addSubscriptionsToProps(Map<String, String> props, Subscriptions subs) {
-        if (subs == null || subs.getList() == null || subs.getList().isEmpty())
-            return;
-        int i = 1;
-        for (Subscription sub : subs.getList()) {
-            ApplicationInfo appInfo = sub.getApplicationInfo();
-            if (appInfo == null)
-                continue;
-            props.put("subscription" + i, appInfo.getName() + " (Owner: " + appInfo.getSubscriber() + ")");
-            i++;
-        }
-    }
-
-    private void addEndpointsToProps(Map<String, String> props, Map<String, Object> apiMap) {
-        if (apiMap == null || !apiMap.containsKey("endpointURLs"))
-            return;
-        @SuppressWarnings("unchecked")
-        List<Map<String, Object>> envs = (List<Map<String, Object>>) apiMap.get("endpointURLs");
-        for (Map<String, Object> env : envs) {
-            @SuppressWarnings("unchecked")
-            Map<String, String> urls = (Map<String, String>) env.get("URLs");
-            if (urls == null)
-                continue;
-            urls.forEach((k, v) -> {
-                if (v != null)
-                    props.put(k + " Endpoint", v);
-            });
-        }
-    }
-
-    private void addTagsToProps(Map<String, String> props, List<String> tags) {
-        if (tags != null && !tags.isEmpty()) {
-            props.put("tags", String.join(", ", tags));
-        }
-    }
-
-    private List<ArtifactReference> createReferencesFromZip(List<ZipEntryData> zipEntries, String group, APIInfo api)
-            throws IOException {
-
-        List<ArtifactReference> references = new ArrayList<>();
-        for (ZipEntryData entry : zipEntries) {
-            String artifactId = api.getName() + "/" + api.getVersion() + "/" + entry.getName();
-
-            try (ByteArrayInputStream is = new ByteArrayInputStream(entry.getContent())) {
-                ArtifactMetaData meta = client.createArtifactWithVersion(entry.getType().toString(), artifactId,
-                        api.getVersion(), is);
-                Map<String, String> props = new LinkedHashMap<>();
-                props.put(PARAM_SOURCE_APIM, VERSION_32);
-                setArtifactMetaData(meta, props);
-            }
-
-            ArtifactReference ref = new ArtifactReference();
-            ref.setName(entry.getName());
-            ref.setGroupId(entry.getType().toString());
-            ref.setArtifactId(artifactId);
-            ref.setVersion(api.getVersion());
-            references.add(ref);
-        }
-        return references;
-    }
-
-    private void addSubscriptionsToReferences(List<ArtifactReference> references, Subscriptions subs, APIInfo api)
-            throws Exception {
-        if (subs == null || subs.getList() == null || subs.getList().isEmpty())
-            return;
-
-        String artifactId = api.getName() + "/" + api.getVersion() + "/" + ARTIFACT_NAME_SUBSCRIPTIONS;
-
-        byte[] subsBytes = mapperYaml.writeValueAsBytes(subs);
-
-        try (ByteArrayInputStream is = new ByteArrayInputStream(subsBytes)) {
-            ArtifactMetaData meta = client.createArtifactWithVersion(ARTIFACT_GROUP_SUBSCRIPTIONS, artifactId,
-                    api.getVersion(), is);
-            Map<String, String> props = new LinkedHashMap<>();
-            props.put(PARAM_SOURCE_APIM, VERSION_32);
-            setArtifactMetaData(meta, props);
-        }
-
-        ArtifactReference ref = new ArtifactReference();
-        ref.setName(ARTIFACT_NAME_SUBSCRIPTIONS);
-        ref.setGroupId(ARTIFACT_GROUP_SUBSCRIPTIONS);
-        ref.setArtifactId(artifactId);
-        ref.setVersion(api.getVersion());
-        references.add(ref);
-    }
-
-    private void setArtifactMetaData(ArtifactMetaData meta, Map<String, String> props) {
-        EditableMetaData metaData = new EditableMetaData();
-        metaData.setName(meta.getName());
-        metaData.setDescription(meta.getDescription());
-        if (props != null)
-            metaData.setProperties(props);
-
-        client.updateArtifactMetaData(meta.getGroupId(), meta.getId(), metaData);
-    }
-
-    private void createRule(ArtifactMetaData meta, String config, RuleType type) {
-        Rule rule = new Rule();
-        rule.setConfig(config);
-        rule.setType(type);
-        client.createArtifactRule(meta.getGroupId(), meta.getId(), rule);
-    }
-}
+package cz.trask.migration.impl.v32;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import cz.trask.migration.AbstractProcess;
+import cz.trask.migration.model.APIInfo;
+import cz.trask.migration.model.APIList;
+import cz.trask.migration.model.FileType;
+import cz.trask.migration.model.HttpResponse;
+import cz.trask.migration.model.TokenResponse;
+import cz.trask.migration.model.ZipEntryData;
+import cz.trask.migration.model.v32.Subscriptions;
+import cz.trask.migration.model.v32.Subscriptions.ApplicationInfo;
+import cz.trask.migration.model.v32.Subscriptions.Subscription;
+import io.apicurio.registry.rest.client.RegistryClient;
+import io.apicurio.registry.rest.client.RegistryClientFactory;
+import io.apicurio.registry.rest.client.exception.VersionAlreadyExistsException;
+import io.apicurio.registry.rest.v2.beans.ArtifactMetaData;
+import io.apicurio.registry.rest.v2.beans.ArtifactReference;
+import io.apicurio.registry.rest.v2.beans.EditableMetaData;
+import io.apicurio.registry.rest.v2.beans.Rule;
+import io.apicurio.registry.rest.v2.beans.VersionSearchResults;
+import io.apicurio.registry.types.RuleType;
+
+public class Wso2v32ToApicurio extends AbstractProcess {
+
+    private static final Logger log = LogManager.getLogger(Wso2v32ToApicurio.class);
+
+    private final AtomicInteger apiCounter = new AtomicInteger(1);
+
+    private final RegistryClient client;
+
+    public Wso2v32ToApicurio() throws Exception {
+        this.client = RegistryClientFactory.create(config.getApicurio().getApiUrl());
+    }
+
+    /**
+     * Main entry point for the import process.
+     *
+     * @throws RuntimeException if any error occurs
+     */
+    public void process() {
+        try {
+            log.info("Starting API import to Apicurio from WSO2...");
+
+            TokenResponse token = authenticateToWso2AndGetToken();
+
+            APIList apis = getList(config.getSource().getPublisherApiUrl(), token);
+            if (apis == null || apis.getList() == null || apis.getList().length == 0) {
+                throw new IllegalStateException(
+                        "No APIs to export that match your criteria! Check the name of the API you want to export.");
+            }
+
+            log.info("Found {} APIs", apis.getCount());
+
+            int maxThreads = config.getMaxThreads();
+            ExecutorService executor = Executors.newFixedThreadPool(maxThreads);
+
+            for (APIInfo api : apis.getList()) {
+                final int index = apiCounter.getAndIncrement();
+                executor.submit(() -> processApi(api, token, index, apis.getCount()));
+            }
+
+            executor.shutdown();
+            if (!executor.awaitTermination(10, TimeUnit.MINUTES)) {
+                log.warn("Timeout waiting for API import tasks to finish");
+            }
+            log.info("Finished processing APIs.");
+        } catch (Exception e) {
+            log.error("Error while exporting APIs.", e);
+            throw new RuntimeException("Export failed", e);
+        }
+    }
+
+    /**
+     * Process a single API – fetches the data, creates or updates the corresponding
+     * artifact in Apicurio.
+     */
+    private void processApi(APIInfo api, TokenResponse tokenResponse, int index, int total) {
+        long start = System.currentTimeMillis();
+        String status = api.getLifeCycleStatus();
+
+        if (!status.contains("PUBLISHED") && !status.contains("DEPRECATED")) {
+            log.info("Skipping API {} of {} – not published (ID={})", index, total, api.getId());
+            return;
+        }
+
+        try {
+            log.info("Processing API {} of {}", index, total);
+
+            Map<String, String> httpHeaders = Collections.singletonMap("Authorization",
+                    "Bearer " + tokenResponse.getAccess_token());
+
+            // 1) Retrieve basic information
+            HttpResponse apiInfoResp = makeRequest("GET",
+                    config.getSource().getDevPortalApiUrl() + "/apis/" + api.getId(), httpHeaders,
+                    Collections.emptyMap());
+
+            HttpResponse subsResp = makeRequest("GET",
+                    config.getSource().getPublisherApiUrl() + "/subscriptions?apiId=" + api.getId(), httpHeaders,
+                    Collections.emptyMap());
+
+            // 2) Export the API as a zip
+            HttpResponse exportedZip = makeRequest("GET",
+                    config.getSource().getPublisherApiUrl() + "/apis/export?apiId=" + api.getId(), httpHeaders,
+                    Collections.emptyMap(), true);
+
+            List<ZipEntryData> zipEntries = ZipUtils.extractFilesFromZip(exportedZip.getResponseBytes());
+
+            String swagger = null;
+
+            for (ZipEntryData e : zipEntries) {
+                if (e.getType().toString().equals(FileType.OPENAPI.toString())) {
+                    log.debug("Found main API definition file: {}", e.getName());
+                    swagger = new String(e.getContent());
+                    break;
+                }
+            }
+
+            // 3) Deserialize JSON responses
+            Map<String, Object> apiMap = mapper.readValue(apiInfoResp.getResponse(), Map.class);
+            Subscriptions subs = mapper.readValue(subsResp.getResponse(), Subscriptions.class);
+
+            @SuppressWarnings("unchecked")
+            List<String> tagsList = (List<String>) apiMap.get("tags");
+
+            // 4) Build the properties map
+            Map<String, String> props = new LinkedHashMap<>();
+            props.put("version", api.getVersion());
+            props.put("status", status);
+            props.put(PARAM_SOURCE_APIM, VERSION_32);
+            addSubscriptionsToProps(props, subs);
+            addEndpointsToProps(props, apiMap);
+            addTagsToProps(props, tagsList);
+
+            // 5) Build the description that contains the publisher & devportal URLs
+            String baseDesc = api.getDescription() != null ? api.getDescription() : "";
+            String pubUrl = config.getPatterns().getPublisherUrlPattern().replace("{API_ID}", api.getId());
+            String devPortUrl = config.getPatterns().getDevPortalUrlPattern().replace("{API_ID}", api.getId());
+
+            String fullDesc = baseDesc + " ***** PUBLISHER URL ***** " + pubUrl + " ***** DEVPORTAL URL ***** "
+                    + devPortUrl;
+
+            // 6) Update the swagger with the description and servers
+            Map<String, Object> swaggerMap = mapperYaml.readValue(swagger, Map.class);
+            ObjectNode swaggerObj = mapperYaml.valueToTree(swaggerMap);
+            updateSwagger(swaggerObj, apiMap, fullDesc);
+
+            // 7) Prepare artifact creation/update
+            String group = config.getApicurio().getDefaultApiGroup();
+            String mainArtifactId = api.getName() + api.getContext();
+
+            VersionSearchResults existingArtifacts;
+            try {
+                existingArtifacts = client.listArtifactVersions(group, mainArtifactId, 0, Integer.MAX_VALUE);
+            } catch (Exception e) {
+                log.debug("No API {} exists – will create it", api.getContext());
+                existingArtifacts = null;
+            }
+
+            if (existingArtifacts == null) {
+                // Create new artifact
+                List<ArtifactReference> references = createReferencesFromZip(zipEntries, group, api);
+                addSubscriptionsToReferences(references, subs, api);
+
+                ArtifactMetaData meta = client.createArtifact(group, mainArtifactId, api.getVersion(), null, null, null,
+                        api.getName(), fullDesc, null, null, null,
+                        new ByteArrayInputStream(swaggerObj.toString().getBytes()), references);
+
+                setArtifactMetaData(meta, props);
+                // Create the three required rules
+                createRule(meta, "NONE", RuleType.COMPATIBILITY);
+                createRule(meta, "NONE", RuleType.VALIDITY);
+                createRule(meta, "NONE", RuleType.INTEGRITY);
+
+            } else {
+                // Artifact exists – check if the version exists
+                boolean versionExists = false;
+                try {
+                    client.getArtifactVersionMetaData(group, mainArtifactId, api.getVersion());
+                    versionExists = true;
+                } catch (Exception e) {
+                    // Version missing – will create it below
+                }
+
+                List<ArtifactReference> references = createReferencesFromZip(zipEntries, group, api);
+                addSubscriptionsToReferences(references, subs, api);
+
+                if (!versionExists) {
+                    ArtifactMetaData meta = client.updateArtifact(group, mainArtifactId, api.getVersion(),
+                            api.getName(), fullDesc, new ByteArrayInputStream(swaggerObj.toString().getBytes()),
+                            references);
+                    setArtifactMetaData(meta, props);
+                } else {
+                    // Version already exists – no action needed
+                    log.warn("API {} with version {} already exists. Skipping import.", api.getContext(),
+                            api.getVersion());
+                }
+            }
+
+            log.info("Successfully imported API '{}' ({}). Took {} ms", api.getName(), api.getVersion(),
+                    System.currentTimeMillis() - start);
+        } catch (IOException e) {
+            log.error("IO error while importing API {}: {}", api.getId(), e.getMessage(), e);
+        } catch (VersionAlreadyExistsException e) {
+            log.warn("API version already exists for {}: {}. Skipping.", api.getName(), api.getVersion());
+        } catch (Exception e) {
+            log.error("Cannot export API '{}':{}", api.getName(), api.getVersion(), e);
+        }
+    }
+
+    /* --------------------------------------------------------------------- */
+    /* Helper methods                                                         */
+    /* --------------------------------------------------------------------- */
+
+    private void updateSwagger(ObjectNode swagger, Map<String, Object> apiMap, String description) {
+        // Update "info.description"
+        ObjectNode info = (ObjectNode) swagger.get("info");
+        if (info != null) {
+            info.put("description", description);
+        }
+
+        // Build "servers" array
+        ArrayNode servers = mapper.createArrayNode();
+
+        List<Map<String, Object>> endpoints = (List<Map<String, Object>>) apiMap.get("endpointURLs");
+        if (endpoints != null) {
+            for (Map<String, Object> env : endpoints) {
+                Map<String, String> urls = (Map<String, String>) env.get("URLs");
+                if (urls == null || urls.isEmpty())
+                    continue;
+
+                ObjectNode server = mapper.createObjectNode();
+                urls.forEach((k, v) -> {
+                    if (v != null && !v.isBlank()) {
+                        if (k.equals("https") || k.equals("wss")) {
+                            server.put("url", v);
+                        }
+                    }
+                });
+                server.put("description", "Gateway: " + env.getOrDefault("environmentName", ""));
+                servers.add(server);
+            }
+        }
+
+        // Replace "servers" node
+        swagger.set("servers", servers);
+    }
+
+    private void addSubscriptionsToProps(Map<String, String> props, Subscriptions subs) {
+        if (subs == null || subs.getList() == null || subs.getList().isEmpty())
+            return;
+        int i = 1;
+        for (Subscription sub : subs.getList()) {
+            ApplicationInfo appInfo = sub.getApplicationInfo();
+            if (appInfo == null)
+                continue;
+            props.put("subscription" + i, appInfo.getName() + " (Owner: " + appInfo.getSubscriber() + ")");
+            i++;
+        }
+    }
+
+    private void addEndpointsToProps(Map<String, String> props, Map<String, Object> apiMap) {
+        if (apiMap == null || !apiMap.containsKey("endpointURLs"))
+            return;
+        @SuppressWarnings("unchecked")
+        List<Map<String, Object>> envs = (List<Map<String, Object>>) apiMap.get("endpointURLs");
+        for (Map<String, Object> env : envs) {
+            @SuppressWarnings("unchecked")
+            Map<String, String> urls = (Map<String, String>) env.get("URLs");
+            if (urls == null)
+                continue;
+            urls.forEach((k, v) -> {
+                if (v != null)
+                    props.put(k + " Endpoint", v);
+            });
+        }
+    }
+
+    private void addTagsToProps(Map<String, String> props, List<String> tags) {
+        if (tags != null && !tags.isEmpty()) {
+            props.put("tags", String.join(", ", tags));
+        }
+    }
+
+    private List<ArtifactReference> createReferencesFromZip(List<ZipEntryData> zipEntries, String group, APIInfo api)
+            throws IOException {
+
+        List<ArtifactReference> references = new ArrayList<>();
+        for (ZipEntryData entry : zipEntries) {
+            String artifactId = api.getName() + "/" + api.getVersion() + "/" + entry.getName();
+
+            try (ByteArrayInputStream is = new ByteArrayInputStream(entry.getContent())) {
+                ArtifactMetaData meta = client.createArtifactWithVersion(entry.getType().toString(), artifactId,
+                        api.getVersion(), is);
+                Map<String, String> props = new LinkedHashMap<>();
+                props.put(PARAM_SOURCE_APIM, VERSION_32);
+                setArtifactMetaData(meta, props);
+            }
+
+            ArtifactReference ref = new ArtifactReference();
+            ref.setName(entry.getName());
+            ref.setGroupId(entry.getType().toString());
+            ref.setArtifactId(artifactId);
+            ref.setVersion(api.getVersion());
+            references.add(ref);
+        }
+        return references;
+    }
+
+    private void addSubscriptionsToReferences(List<ArtifactReference> references, Subscriptions subs, APIInfo api)
+            throws Exception {
+        if (subs == null || subs.getList() == null || subs.getList().isEmpty())
+            return;
+
+        String artifactId = api.getName() + "/" + api.getVersion() + "/" + ARTIFACT_NAME_SUBSCRIPTIONS;
+
+        byte[] subsBytes = mapperYaml.writeValueAsBytes(subs);
+
+        try (ByteArrayInputStream is = new ByteArrayInputStream(subsBytes)) {
+            ArtifactMetaData meta = client.createArtifactWithVersion(ARTIFACT_GROUP_SUBSCRIPTIONS, artifactId,
+                    api.getVersion(), is);
+            Map<String, String> props = new LinkedHashMap<>();
+            props.put(PARAM_SOURCE_APIM, VERSION_32);
+            setArtifactMetaData(meta, props);
+        }
+
+        ArtifactReference ref = new ArtifactReference();
+        ref.setName(ARTIFACT_NAME_SUBSCRIPTIONS);
+        ref.setGroupId(ARTIFACT_GROUP_SUBSCRIPTIONS);
+        ref.setArtifactId(artifactId);
+        ref.setVersion(api.getVersion());
+        references.add(ref);
+    }
+
+    private void setArtifactMetaData(ArtifactMetaData meta, Map<String, String> props) {
+        EditableMetaData metaData = new EditableMetaData();
+        metaData.setName(meta.getName());
+        metaData.setDescription(meta.getDescription());
+        if (props != null)
+            metaData.setProperties(props);
+
+        client.updateArtifactMetaData(meta.getGroupId(), meta.getId(), metaData);
+    }
+
+    private void createRule(ArtifactMetaData meta, String config, RuleType type) {
+        Rule rule = new Rule();
+        rule.setConfig(config);
+        rule.setType(type);
+        client.createArtifactRule(meta.getGroupId(), meta.getId(), rule);
+    }
+}
diff --git a/src/main/resources/apicurio-migrator.yaml b/src/main/resources/apicurio-migrator.yaml
index 860ade7..f644a0d 100644
--- a/src/main/resources/apicurio-migrator.yaml
+++ b/src/main/resources/apicurio-migrator.yaml
@@ -1,8 +1,8 @@
 source:
-  registration_api_url: https://localhost:9443/client-registration/v0.17/register
-  publisher_api_url: https://localhost:9443/api/am/publisher
-  devportal_api_url: https://localhost:9443/api/am/store
-  publisher_token_url: https://localhost:9443/oauth2/token
+  registration_api_url: https://localhost:9444/client-registration/v0.17/register
+  publisher_api_url: https://localhost:9444/api/am/publisher
+  devportal_api_url: https://localhost:9444/api/am/store
+  publisher_token_url: https://localhost:9444/oauth2/token
   wso2_user: YWRtaW46YWRtaW4=

 target:
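For a quick local check of the renamed entry point, a minimal sketch along these lines could be used. The test class name and setup are hypothetical and not part of this change; it assumes the apicurio-migrator.yaml shown above is picked up the same way a normal ApiSync run picks it up, and that the WSO2 and Apicurio endpoints are reachable.

    package cz.trask.migration;

    import cz.trask.migration.impl.v32.Wso2v32ToApicurio;

    // Hypothetical smoke test: it drives the same code path that ApiSync
    // dispatches for the "wso2ToApicurio" command.
    public class Wso2v32ToApicurioSmokeTest {

        public static void main(String[] args) throws Exception {
            // The constructor builds the Apicurio RegistryClient from
            // config.getApicurio().getApiUrl(), so a wrong registry URL fails fast here.
            Wso2v32ToApicurio importer = new Wso2v32ToApicurio();

            // Authenticates against WSO2 3.2, lists the matching APIs and imports
            // the published/deprecated ones into Apicurio.
            importer.process();
        }
    }

Note that only the class was renamed (ImportToApicurio to Wso2v32ToApicurio); the "wso2ToApicurio" command string handled by ApiSync is unchanged, so existing invocations keep working.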