diff --git a/.github/workflows/connector_checklist.yml b/.github/workflows/connector_checklist.yml deleted file mode 100644 index 127ac5ae521d..000000000000 --- a/.github/workflows/connector_checklist.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Add Connector Merge Checklist -on: - pull_request_target: - types: [opened, reopened] - paths: - - "airbyte-integrations/connectors/source-**" - - "airbyte-integrations/connectors/destination-**" - - "airbyte-integrations/connectors/third-party/**" -jobs: - checklist_job: - name: Add Connector Merge Checklist Job - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Checklist - uses: wyozi/contextual-qa-checklist-action@master - with: - comment-header: "### Before Merging a Connector Pull Request \n\n Wow! What a great pull request you have here! 🎉 \n\n To merge this PR, ensure the following has been done/considered for each connector added or updated: \n\n" - comment-footer: "If the checklist is complete, but the CI check is failing, \n\n1. Check for hidden checklists in your PR description \n\n2. Toggle the github label `checklist-action-run` on/off to re-run the checklist CI." - show-paths: false - input-file: airbyte-ci/connectors/CONNECTOR_CHECKLIST.yaml - gh-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/connector_checklist_require.yml b/.github/workflows/connector_checklist_require.yml deleted file mode 100644 index 073cc69842c7..000000000000 --- a/.github/workflows/connector_checklist_require.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: Require Connector Checklist -on: - pull_request: - types: - [ - opened, - edited, - synchronize, - labeled, - unlabeled, - reopened, - ready_for_review, - ] - paths: - - "airbyte-integrations/connectors/source-**" - - "airbyte-integrations/connectors/destination-**" - - "airbyte-integrations/connectors/third-party/**" -jobs: - check_for_required: - name: Require Connector Merge Checklist Job - runs-on: ubuntu-latest - steps: - - name: Ensure All Checklist Checked - uses: mheap/require-checklist-action@v2 - with: - requireChecklist: false # TODO (ben) reenable in one week once pull request templates have been updated - - name: Send Error Message - if: failure() - run: | - echo "::error::All checklist items not checked. Review your PR description and comments for unchecked items." - exit 1 diff --git a/.github/workflows/connector_teams_review_requirements.yml b/.github/workflows/connector_teams_review_requirements.yml index 3964f748955e..206e7e46e91d 100644 --- a/.github/workflows/connector_teams_review_requirements.yml +++ b/.github/workflows/connector_teams_review_requirements.yml @@ -9,9 +9,11 @@ on: - synchronize paths: - "airbyte-integrations/connectors/source-**" + - "airbyte-integrations/connectors/destination-**" pull_request_review: paths: - "airbyte-integrations/connectors/source-**" + - "airbyte-integrations/connectors/destination-**" jobs: check-review-requirements: name: "Check if a review is required from Connector teams" diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index 732f0e506695..c25e6d404b80 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -45,13 +45,19 @@ jobs: uses: tj-actions/changed-files@v39 id: changes with: + # Include java connectors and java CDK. + # Adding all *.java and *.gradle files gets us most of the way there. 
+ # We're a bit more strict for the Java CDK, to make sure that + # the tests run when they should, for instance when changing the contents + # of a text file used as a resource. files_yaml: | gradlecheck: - - '**/*' - - '!**/*.md' - - '!.github/*' + - '**/*.java' + - '**/*.gradle' + - 'airbyte-cdk/java/**/*' - uses: actions/setup-java@v3 + if: steps.changes.outputs.gradlecheck_any_changed == 'true' with: distribution: "zulu" java-version: "21" @@ -62,6 +68,7 @@ jobs: if: steps.changes.outputs.gradlecheck_any_changed == 'true' run: python3 -m pip install virtualenv --user - name: Docker login + if: steps.changes.outputs.gradlecheck_any_changed == 'true' # Some tests use testcontainers which pull images from DockerHub. uses: docker/login-action@v1 with: diff --git a/airbyte-cdk/java/airbyte-cdk/README.md b/airbyte-cdk/java/airbyte-cdk/README.md index 1aa24c5c29ec..c3c8979e897b 100644 --- a/airbyte-cdk/java/airbyte-cdk/README.md +++ b/airbyte-cdk/java/airbyte-cdk/README.md @@ -166,6 +166,8 @@ MavenLocal debugging steps: | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 0.21.3 | 2024-02-20 | [\#35394](https://github.com/airbytehq/airbyte/pull/35394) | Add Junit progress information to the test logs | +| 0.21.2 | 2024-02-20 | [\#34978](https://github.com/airbytehq/airbyte/pull/34978) | Reduce log noise in NormalizationLogParser. | | 0.21.1 | 2024-02-20 | [\#35199](https://github.com/airbytehq/airbyte/pull/35199) | Add thread names to the logs. | | 0.21.0 | 2024-02-16 | [\#35314](https://github.com/airbytehq/airbyte/pull/35314) | Delete S3StreamCopier classes. These have been superseded by the async destinations framework. | | 0.20.9 | 2024-02-15 | [\#35240](https://github.com/airbytehq/airbyte/pull/35240) | Make state emission to platform inside state manager itself. | diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.java index 698a9b269f22..80940bcf612b 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination/normalization/NormalizationLogParser.java @@ -53,7 +53,7 @@ Stream<AirbyteMessage> toMessages(final String line) { if (Strings.isEmpty(line)) { return Stream.of(logMessage(Level.INFO, "")); } - final Optional<JsonNode> json = Jsons.tryDeserialize(line); + final Optional<JsonNode> json = Jsons.tryDeserializeWithoutWarn(line); if (json.isPresent()) { return jsonToMessage(json.get()); } else { @@ -96,7 +96,7 @@ private Stream<AirbyteMessage> jsonToMessage(final JsonNode jsonLine) { */ final String logLevel = (jsonLine.hasNonNull("level")) ? jsonLine.get("level").asText() : ""; String logMsg = jsonLine.hasNonNull("msg") ? 
jsonLine.get("msg").asText() : ""; - Level level; + final Level level; switch (logLevel) { case "debug" -> level = Level.DEBUG; case "info" -> level = Level.INFO; @@ -117,7 +117,7 @@ private Stream jsonToMessage(final JsonNode jsonLine) { } } - private static AirbyteMessage logMessage(Level level, String message) { + private static AirbyteMessage logMessage(final Level level, final String message) { return new AirbyteMessage() .withType(Type.LOG) .withLog(new AirbyteLogMessage() @@ -125,7 +125,7 @@ private static AirbyteMessage logMessage(Level level, String message) { .withMessage(message)); } - public static void main(String[] args) { + public static void main(final String[] args) { final NormalizationLogParser normalizationLogParser = new NormalizationLogParser(); final Stream airbyteMessageStream = normalizationLogParser.create(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8))); @@ -135,8 +135,8 @@ public static void main(String[] args) { final String dbtErrorStack = String.join("\n", errors); if (!"".equals(dbtErrorStack)) { final Map errorMap = SentryExceptionHelper.getUsefulErrorMessageAndTypeFromDbtError(dbtErrorStack); - String internalMessage = errorMap.get(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY); - AirbyteMessage traceMessage = new AirbyteMessage() + final String internalMessage = errorMap.get(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY); + final AirbyteMessage traceMessage = new AirbyteMessage() .withType(Type.TRACE) .withTrace(new AirbyteTraceMessage() .withType(AirbyteTraceMessage.Type.ERROR) diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/state/PartialStateWithDestinationStats.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/state/PartialStateWithDestinationStats.java deleted file mode 100644 index 0edf5fada243..000000000000 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/destination_async/state/PartialStateWithDestinationStats.java +++ /dev/null @@ -1,10 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.cdk.integrations.destination_async.state; - -import io.airbyte.cdk.integrations.destination_async.partial_messages.PartialAirbyteMessage; -import io.airbyte.protocol.models.v0.AirbyteStateStats; - -public record PartialStateWithDestinationStats(PartialAirbyteMessage stateMessage, AirbyteStateStats stats, long stateArrivalNumber) {} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index bc919963f6f8..177f844ea34a 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.21.1 +version=0.21.3 diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.java b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.java index 8e9b91c939f8..50307ebd1890 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/test/java/io/airbyte/cdk/integrations/base/AirbyteLogMessageTemplateTest.java @@ -25,51 +25,45 @@ import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.core.config.LoggerConfig; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; +import org.apache.logging.log4j.spi.ExtendedLogger; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; import org.junit.platform.commons.util.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; public class AirbyteLogMessageTemplateTest { - private static final ByteArrayOutputStream outputContent = new ByteArrayOutputStream(); - private static final Logger LOGGER = LoggerFactory.getLogger(AirbyteLogMessageTemplateTest.class); public static final String OUTPUT_STREAM_APPENDER = "OutputStreamAppender"; public static final String CONSOLE_JSON_APPENDER = "ConsoleJSONAppender"; - private static OutputStreamAppender outputStreamAppender; - private static LoggerConfig rootLoggerConfig; - private static LoggerContext loggerContext; + private LoggerContext loggerContext; + private LoggerConfig rootLoggerConfig; + private ExtendedLogger logger; + private OutputStreamAppender outputStreamAppender; + private ByteArrayOutputStream outputContent; - @BeforeAll - static void init() { + void getLogger() { // We are creating a log appender with the same output pattern // as the console json appender defined in this project's log4j2.xml file. // We then attach this log appender with the LOGGER instance so that we can validate the logs // produced by code and assert that it matches the expected format. 
loggerContext = Configurator.initialize(null, "log4j2.xml"); + final Configuration configuration = loggerContext.getConfiguration(); rootLoggerConfig = configuration.getLoggerConfig(""); + outputContent = new ByteArrayOutputStream(); outputStreamAppender = OutputStreamAppender.createAppender( rootLoggerConfig.getAppenders().get(CONSOLE_JSON_APPENDER).getLayout(), null, outputContent, OUTPUT_STREAM_APPENDER, false, true); outputStreamAppender.start(); rootLoggerConfig.addAppender(outputStreamAppender, Level.ALL, null); + logger = loggerContext.getLogger(AirbyteLogMessageTemplateTest.class); } - @BeforeEach - void setup() { - outputContent.reset(); - } - - @AfterAll - static void cleanUp() { + @AfterEach + void closeLogger() { outputStreamAppender.stop(); rootLoggerConfig.removeAppender(OUTPUT_STREAM_APPENDER); loggerContext.close(); @@ -77,7 +71,8 @@ static void cleanUp() { @Test public void testAirbyteLogMessageFormat() throws java.io.IOException { - LOGGER.info("hello"); + getLogger(); + logger.info("hello"); outputContent.flush(); final String logMessage = outputContent.toString(StandardCharsets.UTF_8); @@ -114,12 +109,13 @@ private AirbyteLogMessage validateAirbyteMessageIsLog(final AirbyteMessage airby @ParameterizedTest @ValueSource(ints = {2, 100, 9000}) - public void testAirbyteLogMessageLength(int stringRepeatitions) throws java.io.IOException { + public void testAirbyteLogMessageLength(int stringRepetitions) throws java.io.IOException { + getLogger(); final StringBuilder sb = new StringBuilder(); - for (int i = 0; i < stringRepeatitions; i++) { + for (int i = 0; i < stringRepetitions; i++) { sb.append("abcd"); } - LOGGER.info(sb.toString(), new RuntimeException("aaaaa bbbbbb ccccccc dddddd")); + logger.info(sb.toString(), new RuntimeException("aaaaa bbbbbb ccccccc dddddd")); outputContent.flush(); final String logMessage = outputContent.toString(StandardCharsets.UTF_8); diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java new file mode 100644 index 000000000000..6ccaf15a1255 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/extensions/LoggingInvocationInterceptor.java @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.cdk.extensions; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.time.Duration; +import java.time.Instant; +import java.util.Arrays; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.junit.jupiter.api.extension.DynamicTestInvocationContext; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.InvocationInterceptor; +import org.junit.jupiter.api.extension.ReflectiveInvocationContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * By default, junit only outputs logs to the console, and nothing makes it into log4j logs. This + * class fixes that by using the interceptor facility to print progress and timing information. This + * allows us to have junit loglines in our test logs. 
This is instantiated via Java's + * ServiceLoader. The declaration can be found in + * resources/META-INF/services/org.junit.jupiter.api.extension.Extension + */ +public class LoggingInvocationInterceptor implements InvocationInterceptor { + + private static final class LoggingInvocationInterceptorHandler implements InvocationHandler { + + private static final Logger LOGGER = LoggerFactory.getLogger(LoggingInvocationInterceptor.class); + + private static final Pattern methodPattern = Pattern.compile("intercept(.*)Method"); + + @Override + @SuppressWarnings("unchecked") + public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { + if (LoggingInvocationInterceptor.class.getDeclaredMethod(method.getName(), Invocation.class, ReflectiveInvocationContext.class, + ExtensionContext.class) == null) { + LOGGER.error("Junit LoggingInvocationInterceptor executing unknown interception point {}", method.getName()); + return method.invoke(proxy, args); + } + var invocation = (Invocation) args[0]; + var invocationContext = (ReflectiveInvocationContext) args[1]; + var extensionContext = (ExtensionContext) args[2]; + String methodName = method.getName(); + String logLineSuffix; + Matcher methodMatcher = methodPattern.matcher(methodName); + if (methodName.equals("interceptDynamicTest")) { + logLineSuffix = "execution of DynamicTest %s".formatted(extensionContext.getDisplayName()); + } else if (methodName.equals("interceptTestClassConstructor")) { + logLineSuffix = "instance creation for %s".formatted(invocationContext.getTargetClass()); + } else if (methodMatcher.matches()) { + String interceptedEvent = methodMatcher.group(1); + logLineSuffix = "execution of @%s method %s.%s".formatted(interceptedEvent, + invocationContext.getExecutable().getDeclaringClass().getSimpleName(), invocationContext.getExecutable().getName()); + } else { + logLineSuffix = "execution of unknown intercepted call %s".formatted(methodName); + } + LOGGER.info("Junit starting {}", logLineSuffix); + try { + Instant start = Instant.now(); + Object retVal = invocation.proceed(); + long elapsedMs = Duration.between(start, Instant.now()).toMillis(); + LOGGER.info("Junit completed {} in {} ms", logLineSuffix, elapsedMs); + return retVal; + } catch (Throwable t) { + String stackTrace = Arrays.stream(ExceptionUtils.getStackFrames(t)).takeWhile(s -> !s.startsWith("\tat org.junit")).collect( + Collectors.joining("\n ")); + LOGGER.warn("Junit exception thrown during {}:\n{}", logLineSuffix, stackTrace); + throw t; + } + } + + } + + private final InvocationInterceptor proxy = (InvocationInterceptor) Proxy.newProxyInstance( + getClass().getClassLoader(), + new Class[] {InvocationInterceptor.class}, + new LoggingInvocationInterceptorHandler()); + + @Override + public void interceptAfterAllMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, + ExtensionContext extensionContext) + throws Throwable { + proxy.interceptAfterAllMethod(invocation, invocationContext, extensionContext); + } + + @Override + public void interceptAfterEachMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, + ExtensionContext extensionContext) + throws Throwable { + proxy.interceptAfterEachMethod(invocation, invocationContext, extensionContext); + } + + @Override + public void interceptBeforeAllMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, + ExtensionContext extensionContext) + throws Throwable { + proxy.interceptBeforeAllMethod(invocation, invocationContext, 
extensionContext); + } + + @Override + public void interceptBeforeEachMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, + ExtensionContext extensionContext) + throws Throwable { + proxy.interceptBeforeEachMethod(invocation, invocationContext, extensionContext); + } + + @Override + public void interceptDynamicTest(Invocation<Void> invocation, + DynamicTestInvocationContext invocationContext, + ExtensionContext extensionContext) + throws Throwable { + proxy.interceptDynamicTest(invocation, invocationContext, extensionContext); + } + + @Override + public void interceptTestMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, + ExtensionContext extensionContext) + throws Throwable { + proxy.interceptTestMethod(invocation, invocationContext, extensionContext); + } + + @Override + public void interceptTestTemplateMethod(Invocation<Void> invocation, + ReflectiveInvocationContext<Method> invocationContext, + ExtensionContext extensionContext) + throws Throwable { + proxy.interceptTestTemplateMethod(invocation, invocationContext, extensionContext); + } + + @Override + public <T> T interceptTestFactoryMethod(Invocation<T> invocation, + ReflectiveInvocationContext<Method> invocationContext, + ExtensionContext extensionContext) + throws Throwable { + return proxy.interceptTestFactoryMethod(invocation, invocationContext, extensionContext); + } + + @Override + public <T> T interceptTestClassConstructor(Invocation<T> invocation, + ReflectiveInvocationContext<Constructor<T>> invocationContext, + ExtensionContext extensionContext) + throws Throwable { + return proxy.interceptTestClassConstructor(invocation, invocationContext, extensionContext); + } + +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java index 932a7dda3ac0..6c0b8e40e89f 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/java/io/airbyte/cdk/testutils/ContainerFactory.java @@ -12,12 +12,15 @@ import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; import java.util.stream.Stream; +import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.JdbcDatabaseContainer; +import org.testcontainers.containers.output.OutputFrame; import org.testcontainers.containers.output.Slf4jLogConsumer; import org.testcontainers.utility.DockerImageName; @@ -65,10 +68,13 @@ GenericContainer container() { } private static final ConcurrentMap SHARED_CONTAINERS = new ConcurrentHashMap<>(); + private static final AtomicInteger containerId = new AtomicInteger(0); - private static final MdcScope.Builder TESTCONTAINER_LOG_MDC_BUILDER = new MdcScope.Builder() - .setLogPrefix("testcontainer") - .setPrefixColor(LoggingHelper.Color.RED_BACKGROUND); + private static final MdcScope.Builder getTestContainerLogMdcBuilder(DockerImageName imageName, List<String> methods) { + return new MdcScope.Builder() + .setLogPrefix("testcontainer %s (%s[%s]):".formatted(containerId.incrementAndGet(), imageName, StringUtils.join(methods, ","))) + .setPrefixColor(LoggingHelper.Color.RED_BACKGROUND); + } /** * Creates a new, unshared testcontainer 
instance. This usually wraps the default constructor for @@ -108,8 +114,16 @@ private GenericContainer createAndStartContainer(DockerImageName imageName, List<String> methodNames) { for (String methodName : methodNames) { methods.add(getClass().getMethod(methodName, container.getClass())); } - final var logConsumer = new Slf4jLogConsumer(LOGGER); - TESTCONTAINER_LOG_MDC_BUILDER.produceMappings(logConsumer::withMdc); + final var logConsumer = new Slf4jLogConsumer(LOGGER) { + + public void accept(OutputFrame frame) { + if (frame.getUtf8StringWithoutLineEnding().trim().length() > 0) { + super.accept(frame); + } + } + + }; + getTestContainerLogMdcBuilder(imageName, methodNames).produceMappings(logConsumer::withMdc); container.withLogConsumer(logConsumer); for (Method method : methods) { LOGGER.info("Calling {} in {} on new shared container based on {}.", diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/resources/META-INF/services/org.junit.jupiter.api.extension.Extension b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/resources/META-INF/services/org.junit.jupiter.api.extension.Extension new file mode 100644 index 000000000000..90378b469448 --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/core/src/testFixtures/resources/META-INF/services/org.junit.jupiter.api.extension.Extension @@ -0,0 +1 @@ +io.airbyte.cdk.extensions.LoggingInvocationInterceptor \ No newline at end of file diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/LoggingHelper.java b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/LoggingHelper.java index e351ca2eff44..d9cca1b78089 100644 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/LoggingHelper.java +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/java/io/airbyte/commons/logging/LoggingHelper.java @@ -41,9 +41,10 @@ public String getCode() { @VisibleForTesting public static final String RESET = "\u001B[0m"; + public static final String PREPARE_COLOR_CHAR = "\u001b[m"; public static String applyColor(final Color color, final String msg) { - return color.getCode() + msg + RESET; + return PREPARE_COLOR_CHAR + color.getCode() + msg + PREPARE_COLOR_CHAR + RESET; } } diff --git a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/resources/log4j2-test.xml b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/resources/log4j2-test.xml index 067f667f6854..5fa11880cfba 100644 --- a/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/resources/log4j2-test.xml +++ b/airbyte-cdk/java/airbyte-cdk/dependencies/src/main/resources/log4j2-test.xml @@ -2,9 +2,9 @@ - %d{yyyy-MM-dd'T'HH:mm:ss,SSS}{GMT+0} %thread `%highlight{%p}`%C{1.}(%M):%L - %replace{%m}{$${env:LOG_SCRUB_PATTERN:-\*\*\*\*\*}}{*****}%n + %d{yyyy-MM-dd'T'HH:mm:ss,SSS}{GMT+0}`%t`%T`%highlight{%p}`%C{1.}(%M):%L - %replace{%m}{$${env:LOG_SCRUB_PATTERN:-\*\*\*\*\*}}{*****}%n - %d{yyyy-MM-dd'T'HH:mm:ss,SSS}{GMT+0}`%replace{ %X{log_source}}{^ -}{} > %replace{%m}{$${env:LOG_SCRUB_PATTERN:-\*\*\*\*\*}}{*****}%n + %d{yyyy-MM-dd'T'HH:mm:ss,SSS}{GMT+0}`%replace{%X{log_source}}{^ -}{} > %replace{%m}{$${env:LOG_SCRUB_PATTERN:-\*\*\*\*\*}}{*****}%n ${sys:LOG_LEVEL:-${env:LOG_LEVEL:-INFO}} ${env:AIRBYTE_LOG_SUBDIR:-${date:yyyy-MM-dd'T'HH:mm:ss}} @@ -13,53 +13,48 @@ diff --git a/airbyte-cdk/python/.bumpversion.cfg b/airbyte-cdk/python/.bumpversion.cfg index 94bc67c38d2d..a1f0725409dc 100644 --- a/airbyte-cdk/python/.bumpversion.cfg 
+++ b/airbyte-cdk/python/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.63.2 +current_version = 0.64.0 commit = False [bumpversion:file:setup.py] diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 29e2a40bccfa..62616a3086c9 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.64.0 +Low-code: Add filter to RemoveFields + ## 0.63.2 Correct handling of custom max_records limits in connector_builder diff --git a/airbyte-cdk/python/Dockerfile b/airbyte-cdk/python/Dockerfile index 727c254e13c4..6df448cb5535 100644 --- a/airbyte-cdk/python/Dockerfile +++ b/airbyte-cdk/python/Dockerfile @@ -10,7 +10,7 @@ RUN apk --no-cache upgrade \ && apk --no-cache add tzdata build-base # install airbyte-cdk -RUN pip install --prefix=/install airbyte-cdk==0.63.2 +RUN pip install --prefix=/install airbyte-cdk==0.64.0 # build a clean environment FROM base @@ -32,5 +32,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] # needs to be the same as CDK -LABEL io.airbyte.version=0.63.2 +LABEL io.airbyte.version=0.64.0 LABEL io.airbyte.name=airbyte/source-declarative-manifest diff --git a/airbyte-cdk/python/README.md b/airbyte-cdk/python/README.md index fd42dd0f9ff7..b8998a9d8d83 100644 --- a/airbyte-cdk/python/README.md +++ b/airbyte-cdk/python/README.md @@ -65,7 +65,7 @@ pip install -e ".[dev]" # [dev] installs development-only dependencies If the iteration you are working on includes changes to the models, you might want to regenerate them. In order to do that, you can run: ```bash -./gradlew :airbyte-cdk:python:format +./gradlew :airbyte-cdk:python:build ``` This will generate the files based on the schemas, add the license information and format the code. If you want to only do the former and rely on diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index c2ceb343d6f9..50d6db7da017 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -1847,6 +1847,19 @@ definitions: type: type: string enum: [RemoveFields] + condition: + description: The predicate to filter a property by a property value. Property will be removed if it is empty OR expression is evaluated to True. + type: string + default: "" + interpolation_context: - config - property - parameters + examples: - "{{ property|string == '' }}" - "{{ property is integer }}" - "{{ property|length > 5 }}" - "{{ property == 'some_string_to_match' }}" field_pointers: title: Field Paths description: Array of paths defining the field to remove. Each item is an array whose field describe the path of a field to remove. 
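The `condition` option introduced above makes `RemoveFields` value-aware: instead of always deleting every field matched by `field_pointers`, the transformation can keep or drop each matched field depending on an interpolated boolean over its value. Below is a minimal usage sketch, assuming only what this diff shows (the `RemoveFields` dataclass in `airbyte_cdk/sources/declarative/transformations/remove_fields.py` and the interpolation examples from the schema); the record contents and field names are hypothetical.

```python
# Sketch of the new `condition` parameter on RemoveFields (CDK 0.64.0).
# The record below is made up for illustration.
from airbyte_cdk.sources.declarative.transformations.remove_fields import RemoveFields

# "**" recurses into every field; the condition is evaluated once per matched
# value, which is exposed to the Jinja expression as `property`.
transformation = RemoveFields(
    field_pointers=[["**"]],
    condition="{{ property is integer }}",  # drop only integer-valued fields
    parameters={},
)

record = {"id": 123, "name": "alice", "meta": {"count": 7, "tag": "x"}}
transformation.transform(record)  # mutates the record in place
assert record == {"name": "alice", "meta": {"tag": "x"}}
```

When `condition` is left empty (the default), no `afilter` is passed to `dpath.util.delete`, so every field matched by `field_pointers` is removed unconditionally, exactly as before this change.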
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index c53385bf36af..78006147385f 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -579,6 +579,16 @@ class SchemaNormalization(Enum): class RemoveFields(BaseModel): type: Literal['RemoveFields'] + condition: Optional[str] = Field( + '', + description="The predicate to filter a property by a property value. Property will be removed if it is empty OR expression is evaluated to True.", + examples=[ + "{{ property|string == '' }}", + '{{ property is integer }}', + '{{ property|length > 5 }}', + "{{ property == 'some_string_to_match' }}", + ], + ) field_pointers: List[List[str]] = Field( ..., description='Array of paths defining the field to remove. Each item is an array whose field describe the path of a field to remove.', diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 03e43e455947..d9f4930d7bdf 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -909,7 +909,7 @@ def create_record_selector( @staticmethod def create_remove_fields(model: RemoveFieldsModel, config: Config, **kwargs: Any) -> RemoveFields: - return RemoveFields(field_pointers=model.field_pointers, parameters={}) + return RemoveFields(field_pointers=model.field_pointers, condition=model.condition or "", parameters={}) def create_selective_authenticator(self, model: SelectiveAuthenticatorModel, config: Config, **kwargs: Any) -> DeclarativeAuthenticator: authenticators = {name: self._create_component_from_model(model=auth, config=config) for name, auth in model.authenticators.items()} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/remove_fields.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/remove_fields.py index b0d222273ef3..964483dddd87 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/remove_fields.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/remove_fields.py @@ -7,6 +7,7 @@ import dpath.exceptions import dpath.util +from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean from airbyte_cdk.sources.declarative.transformations import RecordTransformation from airbyte_cdk.sources.declarative.types import Config, FieldPointer, StreamSlice, StreamState @@ -40,6 +41,10 @@ class RemoveFields(RecordTransformation): field_pointers: List[FieldPointer] parameters: InitVar[Mapping[str, Any]] + condition: str = "" + + def __post_init__(self, parameters: Mapping[str, Any]) -> None: + self._filter_interpolator = InterpolatedBoolean(condition=self.condition, parameters=parameters) def transform( self, @@ -55,7 +60,11 @@ def transform( for pointer in self.field_pointers: # the dpath library by default doesn't delete fields from arrays try: - dpath.util.delete(record, pointer) + dpath.util.delete( + record, + pointer, + afilter=(lambda x: self._filter_interpolator.eval(config or {}, property=x)) if self.condition else None, + ) except 
dpath.exceptions.PathNotFound: # if the (potentially nested) property does not exist, silently skip pass diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index efb68903ff90..86036db6b3ee 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -36,7 +36,7 @@ name="airbyte-cdk", # The version of the airbyte-cdk package is used at runtime to validate manifests. That validation must be # updated if our semver format changes such as using release candidate versions. - version="0.63.2", + version="0.64.0", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/transformations/test_remove_fields.py b/airbyte-cdk/python/unit_tests/sources/declarative/transformations/test_remove_fields.py index 30cd88787935..abcecdd884f8 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/transformations/test_remove_fields.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/transformations/test_remove_fields.py @@ -10,39 +10,79 @@ @pytest.mark.parametrize( - ["input_record", "field_pointers", "expected"], + ["input_record", "field_pointers", "condition", "expected"], [ - pytest.param({"k1": "v", "k2": "v"}, [["k1"]], {"k2": "v"}, id="remove a field that exists (flat dict)"), - pytest.param({"k1": "v", "k2": "v"}, [["k3"]], {"k1": "v", "k2": "v"}, id="remove a field that doesn't exist (flat dict)"), - pytest.param({"k1": "v", "k2": "v"}, [["k1"], ["k2"]], {}, id="remove multiple fields that exist (flat dict)"), + pytest.param({"k1": "v", "k2": "v"}, [["k1"]], None, {"k2": "v"}, id="remove a field that exists (flat dict), condition = None"), + pytest.param({"k1": "v", "k2": "v"}, [["k1"]], "", {"k2": "v"}, id="remove a field that exists (flat dict)"), + pytest.param({"k1": "v", "k2": "v"}, [["k3"]], "", {"k1": "v", "k2": "v"}, id="remove a field that doesn't exist (flat dict)"), + pytest.param({"k1": "v", "k2": "v"}, [["k1"], ["k2"]], "", {}, id="remove multiple fields that exist (flat dict)"), # TODO: should we instead splice the element out of the array? I think that's the more intuitive solution # Otherwise one could just set the field's value to null. 
- pytest.param({"k1": [1, 2]}, [["k1", 0]], {"k1": [None, 2]}, id="remove field inside array (int index)"), - pytest.param({"k1": [1, 2]}, [["k1", "0"]], {"k1": [None, 2]}, id="remove field inside array (string index)"), + pytest.param({"k1": [1, 2]}, [["k1", 0]], "", {"k1": [None, 2]}, id="remove field inside array (int index)"), + pytest.param({"k1": [1, 2]}, [["k1", "0"]], "", {"k1": [None, 2]}, id="remove field inside array (string index)"), pytest.param( {"k1": "v", "k2": "v", "k3": [0, 1], "k4": "v"}, [["k1"], ["k2"], ["k3", 0]], + "", {"k3": [None, 1], "k4": "v"}, id="test all cases (flat)", ), - pytest.param({"k1": [0, 1]}, [[".", "k1", 10]], {"k1": [0, 1]}, id="remove array index that doesn't exist (flat)"), - pytest.param({".": {"k1": [0, 1]}}, [[".", "k1", 10]], {".": {"k1": [0, 1]}}, id="remove array index that doesn't exist (nested)"), - pytest.param({".": {"k2": "v", "k1": "v"}}, [[".", "k1"]], {".": {"k2": "v"}}, id="remove nested field that exists"), + pytest.param({"k1": [0, 1]}, [[".", "k1", 10]], "", {"k1": [0, 1]}, id="remove array index that doesn't exist (flat)"), pytest.param( - {".": {"k2": "v", "k1": "v"}}, [[".", "k3"]], {".": {"k2": "v", "k1": "v"}}, id="remove field that doesn't exist (nested)" + {".": {"k1": [0, 1]}}, [[".", "k1", 10]], "", {".": {"k1": [0, 1]}}, id="remove array index that doesn't exist (nested)" ), - pytest.param({".": {"k2": "v", "k1": "v"}}, [[".", "k1"], [".", "k2"]], {".": {}}, id="remove multiple fields that exist (nested)"), + pytest.param({".": {"k2": "v", "k1": "v"}}, [[".", "k1"]], "", {".": {"k2": "v"}}, id="remove nested field that exists"), pytest.param( - {".": {"k1": [0, 1]}}, [[".", "k1", 0]], {".": {"k1": [None, 1]}}, id="remove multiple fields that exist in arrays (nested)" + {".": {"k2": "v", "k1": "v"}}, [[".", "k3"]], "", {".": {"k2": "v", "k1": "v"}}, id="remove field that doesn't exist (nested)" + ), + pytest.param( + {".": {"k2": "v", "k1": "v"}}, [[".", "k1"], [".", "k2"]], "", {".": {}}, id="remove multiple fields that exist (nested)" + ), + pytest.param( + {".": {"k1": [0, 1]}}, + [[".", "k1", 0]], + "", + {".": {"k1": [None, 1]}}, + id="remove multiple fields that exist in arrays (nested)", ), pytest.param( {".": {"k1": [{"k2": "v", "k3": "v"}, {"k4": "v"}]}}, [[".", "k1", 0, "k2"], [".", "k1", 1, "k4"]], + "", {".": {"k1": [{"k3": "v"}, {}]}}, id="remove fields that exist in arrays (deeply nested)", ), + pytest.param( + {"k1": "v", "k2": "v"}, + [["**"]], + "{{ False }}", + {"k1": "v", "k2": "v"}, + id="do not remove any field if condition is boolean False", + ), + pytest.param({"k1": "v", "k2": "v"}, [["**"]], "{{ True }}", {}, id="remove all field if condition is boolean True"), + pytest.param( + {"k1": "v", "k2": "v1", "k3": "v1", "k4": {"k_nested": "v1", "k_nested2": "v2"}}, + [["**"]], + "{{ property == 'v1' }}", + {"k1": "v", "k4": {"k_nested2": "v2"}}, + id="recursively remove any field that matches property condition and leave that does not", + ), + pytest.param( + {"k1": "v", "k2": "some_long_string", "k3": "some_long_string", "k4": {"k_nested": "v1", "k_nested2": "v2"}}, + [["**"]], + "{{ property|length > 5 }}", + {"k1": "v", "k4": {"k_nested": "v1", "k_nested2": "v2"}}, + id="remove any field that have length > 5 and leave that does not", + ), + pytest.param( + {"k1": 255, "k2": "some_string", "k3": "some_long_string", "k4": {"k_nested": 123123, "k_nested2": "v2"}}, + [["**"]], + "{{ property is integer }}", + {"k2": "some_string", "k3": "some_long_string", "k4": {"k_nested2": "v2"}}, + id="recursively 
remove any field that is of type integer and leave those that are not", + ), ], ) -def test_remove_fields(input_record: Mapping[str, Any], field_pointers: List[FieldPointer], expected: Mapping[str, Any]): - transformation = RemoveFields(field_pointers=field_pointers, parameters={}) +def test_remove_fields(input_record: Mapping[str, Any], field_pointers: List[FieldPointer], condition: str, expected: Mapping[str, Any]): + transformation = RemoveFields(field_pointers=field_pointers, condition=condition, parameters={}) assert transformation.transform(input_record) == expected diff --git a/airbyte-ci/connectors/CONNECTOR_CHECKLIST.yaml b/airbyte-ci/connectors/CONNECTOR_CHECKLIST.yaml deleted file mode 100644 index 7957973df4e7..000000000000 --- a/airbyte-ci/connectors/CONNECTOR_CHECKLIST.yaml +++ /dev/null @@ -1,11 +0,0 @@ -paths: - "airbyte-integrations/connectors/**": - - PR name follows [PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention) - - "[Breaking changes are considered](https://docs.airbyte.com/contributing-to-airbyte/change-cdk-connector/#breaking-changes-to-connectors). If a **Breaking Change** is being introduced, ensure an Airbyte engineer has created a Breaking Change Plan." - - Connector version has been incremented in the Dockerfile and metadata.yaml according to our [Semantic Versioning for Connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors) guidelines - - You've updated the connector's `metadata.yaml` file any other relevant changes, including a `breakingChanges` entry for major version bumps. See [metadata.yaml docs](https://docs.airbyte.com/connector-development/connector-metadata-file/) - - Secrets in the connector's spec are annotated with `airbyte_secret` - - All documentation files are up to date. (README.md, bootstrap.md, docs.md, etc...) - - Changelog updated in `docs/integrations//.md` with an entry for the new version. See changelog [example](https://docs.airbyte.io/integrations/sources/stripe#changelog) - - Migration guide updated in `docs/integrations//-migrations.md` with an entry for the new version, if the version is a breaking change. See migration guide [example](https://docs.airbyte.io/integrations/sources/faker-migrations) - - If set, you've ensured the icon is present in the `platform-internal` repo. ([Docs](https://docs.airbyte.com/connector-development/connector-metadata-file/#the-icon-field)) diff --git a/airbyte-ci/connectors/connectors_qa/README.md b/airbyte-ci/connectors/connectors_qa/README.md index e2c93285f133..3dc2ffa29830 100644 --- a/airbyte-ci/connectors/connectors_qa/README.md +++ b/airbyte-ci/connectors/connectors_qa/README.md @@ -101,6 +101,9 @@ poe lint ## Changelog +### 1.0.3 +Disable `CheckDocumentationStructure` for now. + ### 1.0.2 Fix access to connector types: it should be accessed from the `Connector.connector_type` attribute. diff --git a/airbyte-ci/connectors/connectors_qa/pyproject.toml b/airbyte-ci/connectors/connectors_qa/pyproject.toml index d76b722a4bb9..b0a761f23951 100644 --- a/airbyte-ci/connectors/connectors_qa/pyproject.toml +++ b/airbyte-ci/connectors/connectors_qa/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "connectors-qa" -version = "1.0.2" +version = "1.0.3" description = "A package to run QA checks on Airbyte connectors, generate reports and documentation." 
authors = ["Airbyte "] readme = "README.md" diff --git a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/documentation.py b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/documentation.py index bdd4b6eaaca2..289f1d7e5f3f 100644 --- a/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/documentation.py +++ b/airbyte-ci/connectors/connectors_qa/src/connectors_qa/checks/documentation.py @@ -201,6 +201,6 @@ def _run(self, connector: Connector) -> CheckResult: ENABLED_CHECKS = [ CheckMigrationGuide(), CheckDocumentationExists(), - CheckDocumentationStructure(), + # CheckDocumentationStructure(), # Disabled as many are failing - we either need a big push or to block everyone. See https://github.com/airbytehq/airbyte/commit/4889e6e024d64ba0e353611f8fe67497b02de190#diff-3c73c6521bf819248b3d3d8aeab7cacfa4e8011f9890da93c77da925ece7eb20L262 CheckChangelogEntry(), ] diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index 99d25dd0c415..cf44ae7a4843 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -644,6 +644,7 @@ E.G.: running Poe tasks on the modified internal packages of the current branch: | Version | PR | Description | | ------- | ---------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | +| 4.3.1 | [#35437](https://github.com/airbytehq/airbyte/pull/35437) | Do not run QA checks on publish, just MetadataValidation. | | 4.3.0 | [#35438](https://github.com/airbytehq/airbyte/pull/35438) | Optionally disable telemetry with environment variable. | | 4.2.4 | [#35325](https://github.com/airbytehq/airbyte/pull/35325) | Use `connectors_qa` for QA checks and remove redundant checks. | | 4.2.3 | [#35322](https://github.com/airbytehq/airbyte/pull/35322) | Declare `connectors_qa` as an internal package for testing. | diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py index fb97c7e157bb..849a9348f115 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py @@ -13,8 +13,7 @@ from pipelines.airbyte_ci.connectors.build_image import steps from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext from pipelines.airbyte_ci.connectors.reports import ConnectorReport -from pipelines.airbyte_ci.connectors.test.steps.common import QaChecks -from pipelines.airbyte_ci.metadata.pipeline import MetadataUpload +from pipelines.airbyte_ci.metadata.pipeline import MetadataUpload, MetadataValidation from pipelines.airbyte_ci.steps.python_registry import PublishToPythonRegistry, PythonRegistryPublishContext from pipelines.dagger.actions.remote_storage import upload_to_gcs from pipelines.dagger.actions.system import docker @@ -274,11 +273,11 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport: results = [] - qa_check_results = await QaChecks(context).run() - results.append(qa_check_results) + metadata_validation_results = await MetadataValidation(context).run() + results.append(metadata_validation_results) - # Exit early if the qa checks do not pass - if qa_check_results.status is not StepStatus.SUCCESS: + # Exit early if the metadata file is invalid. 
+ if metadata_validation_results.status is not StepStatus.SUCCESS: return create_connector_report(results) check_connector_image_results = await CheckConnectorImageDoesNotExist(context).run() diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index 0ff42460da6c..4c6158f82964 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "4.3.0" +version = "4.3.1" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index 79430b9ea282..471c89ae6ece 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -147,7 +147,7 @@ def test_parse_spec_output_no_spec(self, publish_context): STEPS_TO_PATCH = [ - (publish_pipeline, "QaChecks"), + (publish_pipeline, "MetadataValidation"), (publish_pipeline, "MetadataUpload"), (publish_pipeline, "CheckConnectorImageDoesNotExist"), (publish_pipeline, "UploadSpecToCache"), @@ -159,29 +159,29 @@ def test_parse_spec_output_no_spec(self, publish_context): @pytest.mark.parametrize("pre_release", [True, False]) -async def test_run_connector_publish_pipeline_when_failed_qa_checks(mocker, pre_release): - """We validate that no other steps are called if the qa checks step fails.""" +async def test_run_connector_publish_pipeline_when_failed_validation(mocker, pre_release): + """We validate that no other steps are called if the metadata validation step fails.""" for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - run_qa_checks = publish_pipeline.QaChecks.return_value.run - run_qa_checks.return_value = mocker.Mock(status=StepStatus.FAILURE) + run_metadata_validation = publish_pipeline.MetadataValidation.return_value.run + run_metadata_validation.return_value = mocker.Mock(status=StepStatus.FAILURE) context = mocker.MagicMock(pre_release=pre_release) semaphore = anyio.Semaphore(1) report = await publish_pipeline.run_connector_publish_pipeline(context, semaphore) - run_qa_checks.assert_called_once() + run_metadata_validation.assert_called_once() # Check that nothing else is called for module, to_mock in STEPS_TO_PATCH: - if to_mock != "QaChecks": + if to_mock != "MetadataValidation": getattr(module, to_mock).return_value.run.assert_not_called() assert ( report.steps_results == context.report.steps_results == [ - run_qa_checks.return_value, + run_metadata_validation.return_value, ] ) @@ -200,8 +200,8 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - run_qa_checks = publish_pipeline.QaChecks.return_value.run - run_qa_checks.return_value = mocker.Mock(status=StepStatus.SUCCESS) + run_metadata_validation = publish_pipeline.MetadataValidation.return_value.run + run_metadata_validation.return_value = mocker.Mock(status=StepStatus.SUCCESS) # ensure spec always succeeds run_upload_spec_to_cache = publish_pipeline.UploadSpecToCache.return_value.run @@ -214,12 +214,12 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker semaphore = anyio.Semaphore(1) report = await 
publish_pipeline.run_connector_publish_pipeline(publish_context, semaphore) - run_qa_checks.assert_called_once() + run_metadata_validation.assert_called_once() run_check_connector_image_does_not_exist.assert_called_once() # Check that nothing else is called for module, to_mock in STEPS_TO_PATCH: - if to_mock not in ["QaChecks", "MetadataUpload", "CheckConnectorImageDoesNotExist", "UploadSpecToCache"]: + if to_mock not in ["MetadataValidation", "MetadataUpload", "CheckConnectorImageDoesNotExist", "UploadSpecToCache"]: getattr(module, to_mock).return_value.run.assert_not_called() if check_image_exists_status is StepStatus.SKIPPED: @@ -228,7 +228,7 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker report.steps_results == publish_context.report.steps_results == [ - run_qa_checks.return_value, + run_metadata_validation.return_value, run_check_connector_image_does_not_exist.return_value, run_upload_spec_to_cache.return_value, run_metadata_upload.return_value, @@ -241,7 +241,7 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker report.steps_results == publish_context.report.steps_results == [ - run_qa_checks.return_value, + run_metadata_validation.return_value, run_check_connector_image_does_not_exist.return_value, ] ) @@ -268,10 +268,12 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( upload_to_spec_cache_step_status, metadata_upload_step_status, ): - """We check that the full pipeline is executed as expected when the connector image does not exist and the qa checks passed.""" + """We check that the full pipeline is executed as expected when the connector image does not exist and the metadata validation passed.""" for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - publish_pipeline.QaChecks.return_value.run.return_value = mocker.Mock(name="qa_checks_result", status=StepStatus.SUCCESS) + publish_pipeline.MetadataValidation.return_value.run.return_value = mocker.Mock( + name="metadata_validation_result", status=StepStatus.SUCCESS + ) publish_pipeline.CheckConnectorImageDoesNotExist.return_value.run.return_value = mocker.Mock( name="check_connector_image_does_not_exist_result", status=StepStatus.SUCCESS ) @@ -306,7 +308,7 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( report = await publish_pipeline.run_connector_publish_pipeline(context, semaphore) steps_to_run = [ - publish_pipeline.QaChecks.return_value.run, + publish_pipeline.MetadataValidation.return_value.run, publish_pipeline.CheckConnectorImageDoesNotExist.return_value.run, publish_pipeline.steps.run_connector_build, publish_pipeline.PushConnectorImageToRegistry.return_value.run, @@ -364,7 +366,7 @@ async def test_run_connector_python_registry_publish_pipeline( ) for step in [ - publish_pipeline.QaChecks, + publish_pipeline.MetadataValidation, publish_pipeline.CheckConnectorImageDoesNotExist, publish_pipeline.UploadSpecToCache, publish_pipeline.MetadataUpload, diff --git a/airbyte-integrations/connectors/destination-kvdb/.dockerignore b/airbyte-integrations/connectors/destination-kvdb/.dockerignore deleted file mode 100644 index 1b4b5767b554..000000000000 --- a/airbyte-integrations/connectors/destination-kvdb/.dockerignore +++ /dev/null @@ -1,5 +0,0 @@ -* -!Dockerfile -!main.py -!destination_kvdb -!setup.py diff --git a/airbyte-integrations/connectors/destination-kvdb/Dockerfile b/airbyte-integrations/connectors/destination-kvdb/Dockerfile deleted file mode 
100644 index 31fd4d41eefb..000000000000 --- a/airbyte-integrations/connectors/destination-kvdb/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. -RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_kvdb ./destination_kvdb - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.2 -LABEL io.airbyte.name=airbyte/destination-kvdb diff --git a/airbyte-integrations/connectors/destination-kvdb/metadata.yaml b/airbyte-integrations/connectors/destination-kvdb/metadata.yaml index 9c553db1318b..64a38cfa1441 100644 --- a/airbyte-integrations/connectors/destination-kvdb/metadata.yaml +++ b/airbyte-integrations/connectors/destination-kvdb/metadata.yaml @@ -4,10 +4,12 @@ data: enabled: false oss: enabled: false + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 connectorSubtype: api connectorType: destination definitionId: f2e549cd-8e2a-48f8-822d-cc13630eb42d - dockerImageTag: 0.1.2 + dockerImageTag: 0.1.3 dockerRepository: airbyte/destination-kvdb githubIssueLabel: destination-kvdb icon: kvdb.svg diff --git a/airbyte-integrations/connectors/destination-kvdb/poetry.lock b/airbyte-integrations/connectors/destination-kvdb/poetry.lock new file mode 100644 index 000000000000..7835868ea21e --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/poetry.lock @@ -0,0 +1,1108 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.62.2" +description = "A framework for writing Airbyte Connectors." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.62.2.tar.gz", hash = "sha256:bf45cb847e2d2ab7063d0e1989f6c9cf022771c6ae4fb1e854438c3b8377da85"}, + {file = "airbyte_cdk-0.62.2-py3-none-any.whl", hash = "sha256:6d04d2e8a9a32aa707ddf27a1916ac76969fb50ac39d60582ad2daa08ef832ef"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "graphql-core" +version = "3.2.3" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
+optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, + {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = 
"pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = 
"sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = 
"pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama 
= {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "responses" +version = "0.23.3" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "responses-0.23.3-py3-none-any.whl", hash = "sha256:e6fbcf5d82172fecc0aa1860fd91e58cbfd96cee5e96da5b63fa6eb3caa10dd3"}, + {file = "responses-0.23.3.tar.gz", hash = "sha256:205029e1cb334c21cb4ec64fc7599be48b859a0fd381a42443cdd600bfe8b16a"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +types-PyYAML = "*" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-requests"] + +[[package]] +name = "setuptools" +version = "69.1.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, + {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "sgqlc" +version = "16.3" +description = "Simple GraphQL Client" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "sgqlc-16.3-py3-none-any.whl", hash = "sha256:89d468386a4ba4b5ade991623228b6fb0a25bea1f25643ccac130fb3ef565b72"}, + {file = "sgqlc-16.3.tar.gz", hash = "sha256:be08857775aa3e65ef7b2c1f0cdcc65dd5794907b162b393c189187fee664558"}, +] + +[package.dependencies] +graphql-core = ">=3.1.7,<4.0.0" + +[package.extras] +requests = ["requests"] +websocket = ["websocket-client"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.12" +description = 
"Typing stubs for PyYAML" +optional = false +python-versions = "*" +files = [ + {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, + {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, +] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "40cc246c45e6c2d626e016673f3aa60794f3464d82c8ccd0b62a6b66df2b30da" diff --git a/airbyte-integrations/connectors/destination-kvdb/pyproject.toml b/airbyte-integrations/connectors/destination-kvdb/pyproject.toml new file mode 100644 index 000000000000..2f61ed29c449 --- /dev/null +++ b/airbyte-integrations/connectors/destination-kvdb/pyproject.toml @@ -0,0 +1,31 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.3" +name = "destination-kvdb" +description = "Destination implementation for kvdb." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/destinations/kvdb" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "destination_kvdb" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0.62.1" +sgqlc = "==16.3" + +[tool.poetry.scripts] +destination-kvdb = "destination_kvdb.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "^1.9.3" +freezegun = "^1.2" +pytest-mock = "^3.6.1" +pytest = "^6.2" +responses = "^0.23.1" diff --git a/airbyte-integrations/connectors/destination-kvdb/setup.py b/airbyte-integrations/connectors/destination-kvdb/setup.py deleted file mode 100644 index dab5520718ab..000000000000 --- a/airbyte-integrations/connectors/destination-kvdb/setup.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk", - "requests", -] - -TEST_REQUIREMENTS = ["pytest~=6.1"] - -setup( - name="destination_kvdb", - description="Destination implementation for Kvdb.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/conftest.py b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/conftest.py index b2e2c416bc3c..e7d6bc323d60 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/conftest.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/conftest.py @@ -28,8 +28,41 @@ def config_with_wrong_account_fixture(config): @pytest.fixture(scope="session", name="config_with_include_deleted") -def config_with_include_deleted_fixture(config): - new_config = {**config, "include_deleted": True} +def config_with_include_deleted(config): + new_config = { + **config, + "campaign_statuses": [ + "ACTIVE", + "ARCHIVED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES", + ], + "adset_statuses": [ + "ACTIVE", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES", + ], + "ad_statuses": [ + "ACTIVE", + "ADSET_PAUSED", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "DISAPPROVED", + "IN_PROCESS", + "PAUSED", + "PENDING_BILLING_INFO", + "PENDING_REVIEW", + "PREAPPROVED", + "WITH_ISSUES", + ], + } new_config.pop("_limit", None) new_config.pop("end_date", None) return new_config diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/expected_records.jsonl index 324435dbda63..4859b8984362 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/expected_records.jsonl @@ -1,29 +1,29 @@ -{"stream": "ad_account", "data": {"id": "act_212551616838260", "account_id": "212551616838260", "account_status": 1, "age": 1406.5197337963, "amount_spent": "39125", "balance": "0", "business": {"id": "1506473679510495", "name": "Airbyte"}, "business_city": "", "business_country_code": "US", "business_name": "", "business_street": "", "business_street2": "", "can_create_brand_lift_study": false, "capabilities": ["CAN_CREATE_CALL_ADS", "CAN_SEE_GROWTH_OPPORTUNITY_DATA", "ENABLE_IA_RECIRC_AD_DISPLAY_FORMAT", "CAN_USE_MOBILE_EXTERNAL_PAGE_TYPE", "CAN_USE_FB_FEED_POSITION_IN_VIDEO_VIEW_15S", "ENABLE_BIZ_DISCO_ADS", "ENABLE_BRAND_OBJECTIVES_FOR_BIZ_DISCO_ADS", "ENABLE_DIRECT_REACH_FOR_BIZ_DISCO_ADS", "ENABLE_DYNAMIC_ADS_ON_IG_STORIES_ADS", "ENABLE_IG_STORIES_ADS_PPE_OBJECTIVE", "ENABLE_IG_STORIES_ADS_MESSENGER_DESTINATION", "ENABLE_PAC_FOR_BIZ_DISCO_ADS", "CAN_USE_FB_INSTREAM_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_STORY_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_INSTREAM_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_STORY_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_IA_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_SUG_VIDEO_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_MKT_PLACE_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_FEED_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_EXPLORE_POSITION_IN_VIDEO_VIEW_15S", 
"CAN_USE_AN_CLASSIC_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_REWARD_VIDEO_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_REACH_AND_FREQUENCY", "CAN_USE_RECURRING_BUDGET", "HAS_VALID_PAYMENT_METHODS", "CAN_USE_LINK_CLICK_BILLING_EVENT", "CAN_USE_CPA_BILLING_EVENT", "CAN_SEE_NEW_CONVERSION_WINDOW_NUX", "ADS_INSTREAM_INTERFACE_INTEGRITY", "ADS_INSTREAM_LINK_CLICK", "ADS_INSTREAM_LINK_CLICK_IMAGE", "ADS_IN_OBJECTIVES_DEPRECATION", "MESSENGER_INBOX_ADS_PRODUCT_CATALOG_SALES", "CAN_SHOW_MESSENGER_DUPLICSTION_UPSELL", "ALLOW_INSTREAM_ONLY_FOR_REACH", "ADS_INSTREAM_VIDEO_PLACEMENT_CONVERSIONS", "CAN_CREATE_INSTAGRAM_EXPLORE_ADS", "ALLOW_INSTREAM_VIDEOS_PLACEMENT_ONLY", "ALLOW_INSTREAM_NON_INTERRUPTIVE_LEADGEN", "INSTREAM_VIDEO_AD_DESKTOP_CONVERSION_AD_PREVIEW", "ALLOW_INSTREAM_ONLY_FOR_BRAND_AWARENESS_AUCTION", "ALLOW_SUGGESTED_VIDEOS_PLACEMENT_ONLY", "WHATSAPP_DESTINATION_ADS", "CTM_ADS_CREATION_CLICK_TO_DIRECT", "CTW_ADS_ENABLE_IG_FEED_PLACEMENT", "CTW_ADS_FOR_NON_MESSAGES_OBJECTIVE", "CTW_ADS_TRUSTED_TIER_2_PLUS_ADVERTISER", "CTW_ADS_TRUSTED_TIER_ADVERTISER", "ADS_PLACEMENT_MARKETPLACE", "ADNW_DISABLE_INSTREAM_AND_WEB_PLACEMENT", "CAN_CHANGE_BILLING_THRESHOLD", "CAN_USE_APP_EVENT_AVERAGE_COST_BIDDING", "CAN_USE_LEAD_GEN_AVERAGE_COST_BIDDING", "ADS_VALUE_OPTIMIZATION_DYNAMIC_ADS_1D", "ADS_DELIVERY_INSIGHTS_IN_BIDDING_PRESET_EXPERIMENT", "ADS_DELIVERY_INSIGHTS_OPTIMIZATION_PRESET", "CAN_SEE_APP_AD_EVENTS", "CAN_SEE_NEW_STANDARD_EVENTS_BETA", "CAN_SEE_VCK_HOLIDAY_TEMPLATES", "ENABLE_DCO_FOR_FB_STORY_ADS", "CAN_USE_IG_EXPLORE_GRID_HOME_PLACEMENT", "CAN_USE_IG_EXPLORE_HOME_IN_REACH_AND_FREQUENCY", "CAN_USE_IG_EXPLORE_HOME_POST_ENGAGEMENT_MESSAGES", "CAN_USE_IG_SEARCH_PLACEMENT", "CAN_USE_IG_SEARCH_RESULTS_AUTO_PLACEMENT", "CAN_USE_IG_REELS_PAC_CAROUSEL", "CAN_USE_IG_REELS_POSITION", "CAN_SEE_CONVERSION_LIFT_SUMMARY", "CAN_USE_IG_PROFILE_FEED_POSITION", "CAN_USE_IG_REELS_REACH_AND_FREQUENCY", "CAN_USE_IG_REELS_OVERLAY_POSITION", "CAN_USE_IG_REELS_OVERLAY_PAC", "CAN_USE_IG_SHOP_TAB_PAC", "CAN_SEE_LEARNING_STAGE", "ENABLE_WEBSITE_CONVERSIONS_FOR_FB_STORY_ADS", "ENABLE_MESSENGER_INBOX_VIDEO_ADS", "ENABLE_VIDEO_VIEWS_FOR_FB_STORY_ADS", "ENABLE_LINK_CLICKS_FOR_FB_STORY_ADS", "ENABLE_REACH_FOR_FB_STORY_ADS", "CAN_USE_CALL_TO_ACTION_LINK_IMPORT_EXPORT", "ADS_INSTREAM_VIDEO_ENABLE_SLIDE_SHOW", "ALLOW_INSTREAM_VIDEOS_PLACEMENT_ONLY_IN_VV_REACH_AND_FREQUENCY", "ENABLE_MOBILE_APP_INSTALLS_FOR_FB_STORY_ADS", "ENABLE_LEAD_GEN_FOR_FB_STORY_ADS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_REACH", "CAN_USE_FB_MKT_PLACE_POSITION_IN_VIDEO_VIEW", "CAN_USE_FB_MKT_PLACE_POSITION_IN_STORE_VISIT", "ENABLE_MOBILE_APP_ENGAGEMENT_FOR_FB_STORY_ADS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_BRAND_AWARENESS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_APP_INSTALLS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_LEAD_GENERATION", "CAN_USE_FB_MKT_PLACE_POSITION_IN_MESSAGE", "CAN_USE_FB_MKT_PLACE_POSITION_IN_PAGE_LIKE", "CAN_USE_FB_MKT_PLACE_POSITION_IN_POST_ENGAGEMENT", "RF_ALLOW_MARKETPLACE_ACCOUNT", "RF_ALLOW_SEARCH_ACCOUNT", "VERTICAL_VIDEO_PAC_INSTREAM_UPSELL", "IX_COLLECTION_ENABLED_FOR_BAO_AND_REACH", "ADS_BM_REQUIREMENTS_OCT_15_RELEASE", "ENABLE_POST_ENGAGEMENT_FOR_FB_STORY", "ENBABLE_CATALOG_SALES_FOR_FB_STORY", "CAN_USE_WHATSAPP_DESTINATION_ON_LINK_CLICKS_AND_CONVERSIONS", "CAN_USE_WHATSAPP_DESTINATION_ON_CONVERSIONS", "IS_NON_TAIL_AD_ACCOUNT", "IS_IN_IG_EXISTING_POST_CTA_DEFAULTING_EXPERIMENT", "IS_IN_SHORT_WA_LINK_CTWA_UNCONV_TRAFFIC_EXPERIMENT", "IS_IN_ODAX_EXPERIENCE", "IS_IN_REACH_BRAND_AWARENESS_WHATSAPP_L1_DESTINATION_EXPERIMENT", 
"IS_IN_VIDEO_VIEWS_WHATSAPP_L1_DESTINATION_EXPERIMENT", "IS_IN_WHATSAPP_DESTINATION_DEFAULTING_EXPERIMENT", "CAN_USE_MARKETPLACE_DESKTOP", "ADS_MERCHANT_OVERLAYS_DEPRECATION", "CONNECTIONS_DEPRECATION_V2", "CAN_USE_LIVE_VIDEO_FOR_THRUPLAY", "CAN_SEE_HEC_AM_FLOW", "CAN_SEE_POLITICAL_FLOW", "ADS_INSTREAM_PLACEMENT_CATALOG_SALES", "ENABLE_CONVERSIONS_FOR_FB_GROUP_TAB_ADS", "ENABLE_LINK_CLICK_FOR_FB_GROUP_TAB_ADS", "ENABLE_REACH_FOR_FB_GROUP_TAB_ADS", "CAN_USE_CONVERSATIONS_OPTIMIZATION", "ENABLE_THRUPLAY_OPTIMIZATION_MESSENGER_STORY_ADS", "CAN_USE_IG_STORY_POLLS_PAC_CREATION", "IOS14_CEO_CAMPAIGN_CREATION", "ENABLE_VIDEO_CHANNEL_PLACEMENT_FOR_RSVP_ADS", "DIGITAL_CIRCULAR_ADS", "CAN_SEE_SAFR_V3_FLOW", "CAN_USE_FB_REELS_POSITION", "CAN_USE_ADS_ON_FB_REELS_POSITION", "CAN_USE_FB_REELS_AUTO_PLACEMENT", "ENABLE_FB_REELS_CREATION_PAC_ADS", "ENABLE_FB_REELS_CREATION_DCO_ADS", "ENABLE_FB_REELS_POSTLOOP_CREATION_DCO_ADS", "ENABLE_FB_REELS_POSTLOOP_CREATION_PAC_ADS", "RF_CPA_BILLING_DEPRECATION_PHASE_2", "ENABLE_APP_INSTALL_CUSTOM_PRODUCT_PAGES", "ENABLE_ADS_ON_FB_REELS_PLACEMENT_UNIFICATION", "ADS_RF_FB_REELS_PLACEMENT", "REELS_DM_ADS_ENABLE_REACH_AND_FREQUENCY", "ELIGIBLE_FOR_TEXT_GEN", "CAN_USE_BUDGET_SCHEDULING_API", "ADS_AEMV2_HAS_LAUNCHED"], "created_time": "2020-04-13T18:04:59-0700", "currency": "USD", "disable_reason": 0.0, "end_advertiser": 1506473679510495.0, "end_advertiser_name": "Airbyte", "fb_entity": 85.0, "funding_source": 2825262454257003.0, "funding_source_details": {"id": "2825262454257003", "type": 1}, "has_migrated_permissions": true, "is_attribution_spec_system_default": true, "is_direct_deals_enabled": false, "is_in_3ds_authorization_enabled_market": false, "is_notifications_enabled": true, "is_personal": 0.0, "is_prepay_account": false, "is_tax_id_required": false, "min_campaign_group_spend_cap": 10000.0, "min_daily_budget": 100.0, "name": "Airbyte", "offsite_pixels_tos_accepted": true, "owner": 1506473679510495.0, "rf_spec": {"min_reach_limits": {"US": 200000, "CA": 200000, "GB": 200000, "AR": 200000, "AU": 200000, "AT": 200000, "BE": 200000, "BR": 200000, "CL": 200000, "CN": 200000, "CO": 200000, "HR": 200000, "DK": 200000, "DO": 200000, "EG": 200000, "FI": 200000, "FR": 200000, "DE": 200000, "GR": 200000, "HK": 200000, "IN": 200000, "ID": 200000, "IE": 200000, "IL": 200000, "IT": 200000, "JP": 200000, "JO": 200000, "KW": 200000, "LB": 200000, "MY": 200000, "MX": 200000, "NL": 200000, "NZ": 200000, "NG": 200000, "NO": 200000, "PK": 200000, "PA": 200000, "PE": 200000, "PH": 200000, "PL": 200000, "RU": 200000, "SA": 200000, "RS": 200000, "SG": 200000, "ZA": 200000, "KR": 200000, "ES": 200000, "SE": 200000, "CH": 200000, "TW": 200000, "TH": 200000, "TR": 200000, "AE": 200000, "VE": 200000, "PT": 200000, "LU": 200000, "BG": 200000, "CZ": 200000, "SI": 200000, "IS": 200000, "SK": 200000, "LT": 200000, "TT": 200000, "BD": 200000, "LK": 200000, "KE": 200000, "HU": 200000, "MA": 200000, "CY": 200000, "JM": 200000, "EC": 200000, "RO": 200000, "BO": 200000, "GT": 200000, "CR": 200000, "QA": 200000, "SV": 200000, "HN": 200000, "NI": 200000, "PY": 200000, "UY": 200000, "PR": 200000, "BA": 200000, "PS": 200000, "TN": 200000, "BH": 200000, "VN": 200000, "GH": 200000, "MU": 200000, "UA": 200000, "MT": 200000, "BS": 200000, "MV": 200000, "OM": 200000, "MK": 200000, "LV": 200000, "EE": 200000, "IQ": 200000, "DZ": 200000, "AL": 200000, "NP": 200000, "MO": 200000, "ME": 200000, "SN": 200000, "GE": 200000, "BN": 200000, "UG": 200000, "GP": 200000, "BB": 200000, "AZ": 200000, "TZ": 200000, "LY": 
200000, "MQ": 200000, "CM": 200000, "BW": 200000, "ET": 200000, "KZ": 200000, "NA": 200000, "MG": 200000, "NC": 200000, "MD": 200000, "FJ": 200000, "BY": 200000, "JE": 200000, "GU": 200000, "YE": 200000, "ZM": 200000, "IM": 200000, "HT": 200000, "KH": 200000, "AW": 200000, "PF": 200000, "AF": 200000, "BM": 200000, "GY": 200000, "AM": 200000, "MW": 200000, "AG": 200000, "RW": 200000, "GG": 200000, "GM": 200000, "FO": 200000, "LC": 200000, "KY": 200000, "BJ": 200000, "AD": 200000, "GD": 200000, "VI": 200000, "BZ": 200000, "VC": 200000, "MN": 200000, "MZ": 200000, "ML": 200000, "AO": 200000, "GF": 200000, "UZ": 200000, "DJ": 200000, "BF": 200000, "MC": 200000, "TG": 200000, "GL": 200000, "GA": 200000, "GI": 200000, "CD": 200000, "KG": 200000, "PG": 200000, "BT": 200000, "KN": 200000, "SZ": 200000, "LS": 200000, "LA": 200000, "LI": 200000, "MP": 200000, "SR": 200000, "SC": 200000, "VG": 200000, "TC": 200000, "DM": 200000, "MR": 200000, "AX": 200000, "SM": 200000, "SL": 200000, "NE": 200000, "CG": 200000, "AI": 200000, "YT": 200000, "CV": 200000, "GN": 200000, "TM": 200000, "BI": 200000, "TJ": 200000, "VU": 200000, "SB": 200000, "ER": 200000, "WS": 200000, "AS": 200000, "FK": 200000, "GQ": 200000, "TO": 200000, "KM": 200000, "PW": 200000, "FM": 200000, "CF": 200000, "SO": 200000, "MH": 200000, "VA": 200000, "TD": 200000, "KI": 200000, "ST": 200000, "TV": 200000, "NR": 200000, "RE": 200000, "LR": 200000, "ZW": 200000, "CI": 200000, "MM": 200000, "AN": 200000, "AQ": 200000, "BQ": 200000, "BV": 200000, "IO": 200000, "CX": 200000, "CC": 200000, "CK": 200000, "CW": 200000, "TF": 200000, "GW": 200000, "HM": 200000, "XK": 200000, "MS": 200000, "NU": 200000, "NF": 200000, "PN": 200000, "BL": 200000, "SH": 200000, "MF": 200000, "PM": 200000, "SX": 200000, "GS": 200000, "SS": 200000, "SJ": 200000, "TL": 200000, "TK": 200000, "UM": 200000, "WF": 200000, "EH": 200000}, "countries": ["US", "CA", "GB", "AR", "AU", "AT", "BE", "BR", "CL", "CN", "CO", "HR", "DK", "DO", "EG", "FI", "FR", "DE", "GR", "HK", "IN", "ID", "IE", "IL", "IT", "JP", "JO", "KW", "LB", "MY", "MX", "NL", "NZ", "NG", "NO", "PK", "PA", "PE", "PH", "PL", "RU", "SA", "RS", "SG", "ZA", "KR", "ES", "SE", "CH", "TW", "TH", "TR", "AE", "VE", "PT", "LU", "BG", "CZ", "SI", "IS", "SK", "LT", "TT", "BD", "LK", "KE", "HU", "MA", "CY", "JM", "EC", "RO", "BO", "GT", "CR", "QA", "SV", "HN", "NI", "PY", "UY", "PR", "BA", "PS", "TN", "BH", "VN", "GH", "MU", "UA", "MT", "BS", "MV", "OM", "MK", "EE", "LV", "IQ", "DZ", "AL", "NP", "MO", "ME", "SN", "GE", "BN", "UG", "GP", "BB", "ZW", "CI", "AZ", "TZ", "LY", "MQ", "MM", "CM", "BW", "ET", "KZ", "NA", "MG", "NC", "MD", "FJ", "BY", "JE", "GU", "YE", "ZM", "IM", "HT", "KH", "AW", "PF", "AF", "BM", "GY", "AM", "MW", "AG", "RW", "GG", "GM", "FO", "LC", "KY", "BJ", "AD", "GD", "VI", "BZ", "VC", "MN", "MZ", "ML", "AO", "GF", "UZ", "DJ", "BF", "MC", "TG", "GL", "GA", "GI", "CD", "KG", "PG", "BT", "KN", "SZ", "LS", "LA", "LI", "MP", "SR", "SC", "VG", "TC", "DM", "MR", "AX", "SM", "SL", "NE", "CG", "AI", "YT", "LR", "CV", "GN", "TM", "BI", "TJ", "VU", "SB", "ER", "WS", "AS", "FK", "GQ", "TO", "KM", "PW", "FM", "CF", "SO", "MH", "VA", "TD", "KI", "ST", "TV", "NR", "RE", "AN", "AQ", "BQ", "BV", "IO", "CX", "CC", "CK", "CW", "TF", "GW", "HM", "XK", "MS", "NU", "NF", "PN", "BL", "SH", "MF", "PM", "SX", "GS", "SS", "SJ", "TL", "TK", "UM", "WF", "EH"], "min_campaign_duration": {"US": 1, "CA": 1, "GB": 1, "AR": 1, "AU": 1, "AT": 1, "BE": 1, "BR": 1, "CL": 1, "CN": 1, "CO": 1, "HR": 1, "DK": 1, "DO": 1, "EG": 1, "FI": 1, "FR": 1, 
"DE": 1, "GR": 1, "HK": 1, "IN": 1, "ID": 1, "IE": 1, "IL": 1, "IT": 1, "JP": 1, "JO": 1, "KW": 1, "LB": 1, "MY": 1, "MX": 1, "NL": 1, "NZ": 1, "NG": 1, "NO": 1, "PK": 1, "PA": 1, "PE": 1, "PH": 1, "PL": 1, "RU": 1, "SA": 1, "RS": 1, "SG": 1, "ZA": 1, "KR": 1, "ES": 1, "SE": 1, "CH": 1, "TW": 1, "TH": 1, "TR": 1, "AE": 1, "VE": 1, "PT": 1, "LU": 1, "BG": 1, "CZ": 1, "SI": 1, "IS": 1, "SK": 1, "LT": 1, "TT": 1, "BD": 1, "LK": 1, "KE": 1, "HU": 1, "MA": 1, "CY": 1, "JM": 1, "EC": 1, "RO": 1, "BO": 1, "GT": 1, "CR": 1, "QA": 1, "SV": 1, "HN": 1, "NI": 1, "PY": 1, "UY": 1, "PR": 1, "BA": 1, "PS": 1, "TN": 1, "BH": 1, "VN": 1, "GH": 1, "MU": 1, "UA": 1, "MT": 1, "BS": 1, "MV": 1, "OM": 1, "MK": 1, "LV": 1, "EE": 1, "IQ": 1, "DZ": 1, "AL": 1, "NP": 1, "MO": 1, "ME": 1, "SN": 1, "GE": 1, "BN": 1, "UG": 1, "GP": 1, "BB": 1, "AZ": 1, "TZ": 1, "LY": 1, "MQ": 1, "CM": 1, "BW": 1, "ET": 1, "KZ": 1, "NA": 1, "MG": 1, "NC": 1, "MD": 1, "FJ": 1, "BY": 1, "JE": 1, "GU": 1, "YE": 1, "ZM": 1, "IM": 1, "HT": 1, "KH": 1, "AW": 1, "PF": 1, "AF": 1, "BM": 1, "GY": 1, "AM": 1, "MW": 1, "AG": 1, "RW": 1, "GG": 1, "GM": 1, "FO": 1, "LC": 1, "KY": 1, "BJ": 1, "AD": 1, "GD": 1, "VI": 1, "BZ": 1, "VC": 1, "MN": 1, "MZ": 1, "ML": 1, "AO": 1, "GF": 1, "UZ": 1, "DJ": 1, "BF": 1, "MC": 1, "TG": 1, "GL": 1, "GA": 1, "GI": 1, "CD": 1, "KG": 1, "PG": 1, "BT": 1, "KN": 1, "SZ": 1, "LS": 1, "LA": 1, "LI": 1, "MP": 1, "SR": 1, "SC": 1, "VG": 1, "TC": 1, "DM": 1, "MR": 1, "AX": 1, "SM": 1, "SL": 1, "NE": 1, "CG": 1, "AI": 1, "YT": 1, "CV": 1, "GN": 1, "TM": 1, "BI": 1, "TJ": 1, "VU": 1, "SB": 1, "ER": 1, "WS": 1, "AS": 1, "FK": 1, "GQ": 1, "TO": 1, "KM": 1, "PW": 1, "FM": 1, "CF": 1, "SO": 1, "MH": 1, "VA": 1, "TD": 1, "KI": 1, "ST": 1, "TV": 1, "NR": 1, "RE": 1, "LR": 1, "ZW": 1, "CI": 1, "MM": 1, "AN": 1, "AQ": 1, "BQ": 1, "BV": 1, "IO": 1, "CX": 1, "CC": 1, "CK": 1, "CW": 1, "TF": 1, "GW": 1, "HM": 1, "XK": 1, "MS": 1, "NU": 1, "NF": 1, "PN": 1, "BL": 1, "SH": 1, "MF": 1, "PM": 1, "SX": 1, "GS": 1, "SS": 1, "SJ": 1, "TL": 1, "TK": 1, "UM": 1, "WF": 1, "EH": 1}, "max_campaign_duration": {"US": 90, "CA": 90, "GB": 90, "AR": 90, "AU": 90, "AT": 90, "BE": 90, "BR": 90, "CL": 90, "CN": 90, "CO": 90, "HR": 90, "DK": 90, "DO": 90, "EG": 90, "FI": 90, "FR": 90, "DE": 90, "GR": 90, "HK": 90, "IN": 90, "ID": 90, "IE": 90, "IL": 90, "IT": 90, "JP": 90, "JO": 90, "KW": 90, "LB": 90, "MY": 90, "MX": 90, "NL": 90, "NZ": 90, "NG": 90, "NO": 90, "PK": 90, "PA": 90, "PE": 90, "PH": 90, "PL": 90, "RU": 90, "SA": 90, "RS": 90, "SG": 90, "ZA": 90, "KR": 90, "ES": 90, "SE": 90, "CH": 90, "TW": 90, "TH": 90, "TR": 90, "AE": 90, "VE": 90, "PT": 90, "LU": 90, "BG": 90, "CZ": 90, "SI": 90, "IS": 90, "SK": 90, "LT": 90, "TT": 90, "BD": 90, "LK": 90, "KE": 90, "HU": 90, "MA": 90, "CY": 90, "JM": 90, "EC": 90, "RO": 90, "BO": 90, "GT": 90, "CR": 90, "QA": 90, "SV": 90, "HN": 90, "NI": 90, "PY": 90, "UY": 90, "PR": 90, "BA": 90, "PS": 90, "TN": 90, "BH": 90, "VN": 90, "GH": 90, "MU": 90, "UA": 90, "MT": 90, "BS": 90, "MV": 90, "OM": 90, "MK": 90, "LV": 90, "EE": 90, "IQ": 90, "DZ": 90, "AL": 90, "NP": 90, "MO": 90, "ME": 90, "SN": 90, "GE": 90, "BN": 90, "UG": 90, "GP": 90, "BB": 90, "AZ": 90, "TZ": 90, "LY": 90, "MQ": 90, "CM": 90, "BW": 90, "ET": 90, "KZ": 90, "NA": 90, "MG": 90, "NC": 90, "MD": 90, "FJ": 90, "BY": 90, "JE": 90, "GU": 90, "YE": 90, "ZM": 90, "IM": 90, "HT": 90, "KH": 90, "AW": 90, "PF": 90, "AF": 90, "BM": 90, "GY": 90, "AM": 90, "MW": 90, "AG": 90, "RW": 90, "GG": 90, "GM": 90, "FO": 90, "LC": 90, "KY": 90, "BJ": 90, "AD": 90, "GD": 
90, "VI": 90, "BZ": 90, "VC": 90, "MN": 90, "MZ": 90, "ML": 90, "AO": 90, "GF": 90, "UZ": 90, "DJ": 90, "BF": 90, "MC": 90, "TG": 90, "GL": 90, "GA": 90, "GI": 90, "CD": 90, "KG": 90, "PG": 90, "BT": 90, "KN": 90, "SZ": 90, "LS": 90, "LA": 90, "LI": 90, "MP": 90, "SR": 90, "SC": 90, "VG": 90, "TC": 90, "DM": 90, "MR": 90, "AX": 90, "SM": 90, "SL": 90, "NE": 90, "CG": 90, "AI": 90, "YT": 90, "CV": 90, "GN": 90, "TM": 90, "BI": 90, "TJ": 90, "VU": 90, "SB": 90, "ER": 90, "WS": 90, "AS": 90, "FK": 90, "GQ": 90, "TO": 90, "KM": 90, "PW": 90, "FM": 90, "CF": 90, "SO": 90, "MH": 90, "VA": 90, "TD": 90, "KI": 90, "ST": 90, "TV": 90, "NR": 90, "RE": 90, "LR": 90, "ZW": 90, "CI": 90, "MM": 90, "AN": 90, "AQ": 90, "BQ": 90, "BV": 90, "IO": 90, "CX": 90, "CC": 90, "CK": 90, "CW": 90, "TF": 90, "GW": 90, "HM": 90, "XK": 90, "MS": 90, "NU": 90, "NF": 90, "PN": 90, "BL": 90, "SH": 90, "MF": 90, "PM": 90, "SX": 90, "GS": 90, "SS": 90, "SJ": 90, "TL": 90, "TK": 90, "UM": 90, "WF": 90, "EH": 90}, "max_days_to_finish": {"US": 180, "CA": 180, "GB": 180, "AR": 180, "AU": 180, "AT": 180, "BE": 180, "BR": 180, "CL": 180, "CN": 180, "CO": 180, "HR": 180, "DK": 180, "DO": 180, "EG": 180, "FI": 180, "FR": 180, "DE": 180, "GR": 180, "HK": 180, "IN": 180, "ID": 180, "IE": 180, "IL": 180, "IT": 180, "JP": 180, "JO": 180, "KW": 180, "LB": 180, "MY": 180, "MX": 180, "NL": 180, "NZ": 180, "NG": 180, "NO": 180, "PK": 180, "PA": 180, "PE": 180, "PH": 180, "PL": 180, "RU": 180, "SA": 180, "RS": 180, "SG": 180, "ZA": 180, "KR": 180, "ES": 180, "SE": 180, "CH": 180, "TW": 180, "TH": 180, "TR": 180, "AE": 180, "VE": 180, "PT": 180, "LU": 180, "BG": 180, "CZ": 180, "SI": 180, "IS": 180, "SK": 180, "LT": 180, "TT": 180, "BD": 180, "LK": 180, "KE": 180, "HU": 180, "MA": 180, "CY": 180, "JM": 180, "EC": 180, "RO": 180, "BO": 180, "GT": 180, "CR": 180, "QA": 180, "SV": 180, "HN": 180, "NI": 180, "PY": 180, "UY": 180, "PR": 180, "BA": 180, "PS": 180, "TN": 180, "BH": 180, "VN": 180, "GH": 180, "MU": 180, "UA": 180, "MT": 180, "BS": 180, "MV": 180, "OM": 180, "MK": 180, "LV": 180, "EE": 180, "IQ": 180, "DZ": 180, "AL": 180, "NP": 180, "MO": 180, "ME": 180, "SN": 180, "GE": 180, "BN": 180, "UG": 180, "GP": 180, "BB": 180, "AZ": 180, "TZ": 180, "LY": 180, "MQ": 180, "CM": 180, "BW": 180, "ET": 180, "KZ": 180, "NA": 180, "MG": 180, "NC": 180, "MD": 180, "FJ": 180, "BY": 180, "JE": 180, "GU": 180, "YE": 180, "ZM": 180, "IM": 180, "HT": 180, "KH": 180, "AW": 180, "PF": 180, "AF": 180, "BM": 180, "GY": 180, "AM": 180, "MW": 180, "AG": 180, "RW": 180, "GG": 180, "GM": 180, "FO": 180, "LC": 180, "KY": 180, "BJ": 180, "AD": 180, "GD": 180, "VI": 180, "BZ": 180, "VC": 180, "MN": 180, "MZ": 180, "ML": 180, "AO": 180, "GF": 180, "UZ": 180, "DJ": 180, "BF": 180, "MC": 180, "TG": 180, "GL": 180, "GA": 180, "GI": 180, "CD": 180, "KG": 180, "PG": 180, "BT": 180, "KN": 180, "SZ": 180, "LS": 180, "LA": 180, "LI": 180, "MP": 180, "SR": 180, "SC": 180, "VG": 180, "TC": 180, "DM": 180, "MR": 180, "AX": 180, "SM": 180, "SL": 180, "NE": 180, "CG": 180, "AI": 180, "YT": 180, "CV": 180, "GN": 180, "TM": 180, "BI": 180, "TJ": 180, "VU": 180, "SB": 180, "ER": 180, "WS": 180, "AS": 180, "FK": 180, "GQ": 180, "TO": 180, "KM": 180, "PW": 180, "FM": 180, "CF": 180, "SO": 180, "MH": 180, "VA": 180, "TD": 180, "KI": 180, "ST": 180, "TV": 180, "NR": 180, "RE": 180, "LR": 180, "ZW": 180, "CI": 180, "MM": 180, "AN": 180, "AQ": 180, "BQ": 180, "BV": 180, "IO": 180, "CX": 180, "CC": 180, "CK": 180, "CW": 180, "TF": 180, "GW": 180, "HM": 180, "XK": 180, "MS": 180, "NU": 
180, "NF": 180, "PN": 180, "BL": 180, "SH": 180, "MF": 180, "PM": 180, "SX": 180, "GS": 180, "SS": 180, "SJ": 180, "TL": 180, "TK": 180, "UM": 180, "WF": 180, "EH": 180}, "global_io_max_campaign_duration": 100}, "spend_cap": "0", "tax_id_status": 0.0, "tax_id_type": "0", "timezone_id": 1.0, "timezone_name": "America/Los_Angeles", "timezone_offset_hours_utc": -8.0, "tos_accepted": {"web_custom_audience_tos": 1}, "user_tasks": ["DRAFT", "ANALYZE", "ADVERTISE", "MANAGE"]}, "emitted_at": 1708350623793} +{"stream": "ad_account", "data": {"id": "act_212551616838260", "account_id": "212551616838260", "account_status": 1, "age": 1402.6937847222, "amount_spent": "39125", "balance": "0", "business": {"id": "1506473679510495", "name": "Airbyte"}, "business_city": "", "business_country_code": "US", "business_name": "", "business_street": "", "business_street2": "", "can_create_brand_lift_study": false, "capabilities": ["CAN_CREATE_CALL_ADS", "CAN_SEE_GROWTH_OPPORTUNITY_DATA", "ENABLE_IA_RECIRC_AD_DISPLAY_FORMAT", "CAN_USE_MOBILE_EXTERNAL_PAGE_TYPE", "CAN_USE_FB_FEED_POSITION_IN_VIDEO_VIEW_15S", "ENABLE_BIZ_DISCO_ADS", "ENABLE_BRAND_OBJECTIVES_FOR_BIZ_DISCO_ADS", "ENABLE_DIRECT_REACH_FOR_BIZ_DISCO_ADS", "ENABLE_DYNAMIC_ADS_ON_IG_STORIES_ADS", "ENABLE_IG_STORIES_ADS_PPE_OBJECTIVE", "ENABLE_IG_STORIES_ADS_MESSENGER_DESTINATION", "ENABLE_PAC_FOR_BIZ_DISCO_ADS", "CAN_USE_FB_INSTREAM_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_STORY_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_INSTREAM_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_STORY_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_IA_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_SUG_VIDEO_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_FB_MKT_PLACE_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_FEED_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_IG_EXPLORE_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_CLASSIC_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_AN_REWARD_VIDEO_POSITION_IN_VIDEO_VIEW_15S", "CAN_USE_REACH_AND_FREQUENCY", "CAN_USE_RECURRING_BUDGET", "HAS_VALID_PAYMENT_METHODS", "CAN_USE_LINK_CLICK_BILLING_EVENT", "CAN_USE_CPA_BILLING_EVENT", "CAN_SEE_NEW_CONVERSION_WINDOW_NUX", "ADS_INSTREAM_INTERFACE_INTEGRITY", "ADS_INSTREAM_LINK_CLICK", "ADS_INSTREAM_LINK_CLICK_IMAGE", "ADS_IN_OBJECTIVES_DEPRECATION", "MESSENGER_INBOX_ADS_PRODUCT_CATALOG_SALES", "CAN_SHOW_MESSENGER_DUPLICSTION_UPSELL", "ALLOW_INSTREAM_ONLY_FOR_REACH", "ADS_INSTREAM_VIDEO_PLACEMENT_CONVERSIONS", "CAN_CREATE_INSTAGRAM_EXPLORE_ADS", "ALLOW_INSTREAM_VIDEOS_PLACEMENT_ONLY", "ALLOW_INSTREAM_NON_INTERRUPTIVE_LEADGEN", "INSTREAM_VIDEO_AD_DESKTOP_CONVERSION_AD_PREVIEW", "ALLOW_INSTREAM_ONLY_FOR_BRAND_AWARENESS_AUCTION", "ALLOW_SUGGESTED_VIDEOS_PLACEMENT_ONLY", "WHATSAPP_DESTINATION_ADS", "CTM_ADS_CREATION_CLICK_TO_DIRECT", "CTW_ADS_ENABLE_IG_FEED_PLACEMENT", "CTW_ADS_FOR_NON_MESSAGES_OBJECTIVE", "CTW_ADS_TRUSTED_TIER_2_PLUS_ADVERTISER", "CTW_ADS_TRUSTED_TIER_ADVERTISER", "ADS_PLACEMENT_MARKETPLACE", "ADNW_DISABLE_INSTREAM_AND_WEB_PLACEMENT", "CAN_CHANGE_BILLING_THRESHOLD", "CAN_USE_APP_EVENT_AVERAGE_COST_BIDDING", "CAN_USE_LEAD_GEN_AVERAGE_COST_BIDDING", "ADS_VALUE_OPTIMIZATION_DYNAMIC_ADS_1D", "ADS_DELIVERY_INSIGHTS_IN_BIDDING_PRESET_EXPERIMENT", "ADS_DELIVERY_INSIGHTS_OPTIMIZATION_PRESET", "CAN_SEE_APP_AD_EVENTS", "CAN_SEE_NEW_STANDARD_EVENTS_BETA", "CAN_SEE_VCK_HOLIDAY_TEMPLATES", "ENABLE_DCO_FOR_FB_STORY_ADS", "CAN_USE_IG_EXPLORE_GRID_HOME_PLACEMENT", "CAN_USE_IG_EXPLORE_HOME_IN_REACH_AND_FREQUENCY", "CAN_USE_IG_EXPLORE_HOME_POST_ENGAGEMENT_MESSAGES", "CAN_USE_IG_SEARCH_PLACEMENT", "CAN_USE_IG_SEARCH_RESULTS_AUTO_PLACEMENT", 
"CAN_USE_IG_REELS_PAC_CAROUSEL", "CAN_USE_IG_REELS_POSITION", "CAN_SEE_CONVERSION_LIFT_SUMMARY", "CAN_USE_IG_PROFILE_FEED_POSITION", "CAN_USE_IG_REELS_REACH_AND_FREQUENCY", "CAN_USE_IG_REELS_OVERLAY_POSITION", "CAN_USE_IG_REELS_OVERLAY_PAC", "CAN_USE_IG_SHOP_TAB_PAC", "CAN_SEE_LEARNING_STAGE", "ENABLE_WEBSITE_CONVERSIONS_FOR_FB_STORY_ADS", "ENABLE_MESSENGER_INBOX_VIDEO_ADS", "ENABLE_VIDEO_VIEWS_FOR_FB_STORY_ADS", "ENABLE_LINK_CLICKS_FOR_FB_STORY_ADS", "ENABLE_REACH_FOR_FB_STORY_ADS", "CAN_USE_CALL_TO_ACTION_LINK_IMPORT_EXPORT", "ADS_INSTREAM_VIDEO_ENABLE_SLIDE_SHOW", "ALLOW_INSTREAM_VIDEOS_PLACEMENT_ONLY_IN_VV_REACH_AND_FREQUENCY", "ENABLE_MOBILE_APP_INSTALLS_FOR_FB_STORY_ADS", "ENABLE_LEAD_GEN_FOR_FB_STORY_ADS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_REACH", "CAN_USE_FB_MKT_PLACE_POSITION_IN_VIDEO_VIEW", "CAN_USE_FB_MKT_PLACE_POSITION_IN_STORE_VISIT", "ENABLE_MOBILE_APP_ENGAGEMENT_FOR_FB_STORY_ADS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_BRAND_AWARENESS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_APP_INSTALLS", "CAN_USE_FB_MKT_PLACE_POSITION_IN_LEAD_GENERATION", "CAN_USE_FB_MKT_PLACE_POSITION_IN_MESSAGE", "CAN_USE_FB_MKT_PLACE_POSITION_IN_PAGE_LIKE", "CAN_USE_FB_MKT_PLACE_POSITION_IN_POST_ENGAGEMENT", "RF_ALLOW_MARKETPLACE_ACCOUNT", "RF_ALLOW_SEARCH_ACCOUNT", "VERTICAL_VIDEO_PAC_INSTREAM_UPSELL", "IX_COLLECTION_ENABLED_FOR_BAO_AND_REACH", "ADS_BM_REQUIREMENTS_OCT_15_RELEASE", "ENABLE_POST_ENGAGEMENT_FOR_FB_STORY", "ENBABLE_CATALOG_SALES_FOR_FB_STORY", "CAN_USE_WHATSAPP_DESTINATION_ON_LINK_CLICKS_AND_CONVERSIONS", "CAN_USE_WHATSAPP_DESTINATION_ON_CONVERSIONS", "IS_NON_TAIL_AD_ACCOUNT", "IS_IN_IG_EXISTING_POST_CTA_DEFAULTING_EXPERIMENT", "IS_IN_SHORT_WA_LINK_CTWA_UNCONV_TRAFFIC_EXPERIMENT", "IS_IN_ODAX_EXPERIENCE", "IS_IN_REACH_BRAND_AWARENESS_WHATSAPP_L1_DESTINATION_EXPERIMENT", "IS_IN_VIDEO_VIEWS_WHATSAPP_L1_DESTINATION_EXPERIMENT", "IS_IN_WHATSAPP_DESTINATION_DEFAULTING_EXPERIMENT", "CAN_USE_MARKETPLACE_DESKTOP", "ADS_MERCHANT_OVERLAYS_DEPRECATION", "CONNECTIONS_DEPRECATION_V2", "CAN_USE_LIVE_VIDEO_FOR_THRUPLAY", "CAN_SEE_HEC_AM_FLOW", "CAN_SEE_POLITICAL_FLOW", "ADS_INSTREAM_PLACEMENT_CATALOG_SALES", "ENABLE_CONVERSIONS_FOR_FB_GROUP_TAB_ADS", "ENABLE_LINK_CLICK_FOR_FB_GROUP_TAB_ADS", "ENABLE_REACH_FOR_FB_GROUP_TAB_ADS", "CAN_USE_CONVERSATIONS_OPTIMIZATION", "ENABLE_THRUPLAY_OPTIMIZATION_MESSENGER_STORY_ADS", "CAN_USE_IG_STORY_POLLS_PAC_CREATION", "IOS14_CEO_CAMPAIGN_CREATION", "ENABLE_VIDEO_CHANNEL_PLACEMENT_FOR_RSVP_ADS", "DIGITAL_CIRCULAR_ADS", "CAN_SEE_SAFR_V3_FLOW", "CAN_USE_FB_REELS_POSITION", "CAN_USE_ADS_ON_FB_REELS_POSITION", "CAN_USE_FB_REELS_AUTO_PLACEMENT", "ENABLE_FB_REELS_CREATION_PAC_ADS", "ENABLE_FB_REELS_CREATION_DCO_ADS", "ENABLE_FB_REELS_POSTLOOP_CREATION_DCO_ADS", "ENABLE_FB_REELS_POSTLOOP_CREATION_PAC_ADS", "RF_CPA_BILLING_DEPRECATION_PHASE_2", "ENABLE_APP_INSTALL_CUSTOM_PRODUCT_PAGES", "ENABLE_ADS_ON_FB_REELS_PLACEMENT_UNIFICATION", "ADS_RF_FB_REELS_PLACEMENT", "REELS_DM_ADS_ENABLE_REACH_AND_FREQUENCY", "ELIGIBLE_FOR_TEXT_GEN", "CAN_USE_BUDGET_SCHEDULING_API", "ADS_AEMV2_HAS_LAUNCHED"], "created_time": "2020-04-13T18:04:59-0700", "currency": "USD", "disable_reason": 0.0, "end_advertiser": 1506473679510495.0, "end_advertiser_name": "Airbyte", "fb_entity": 85.0, "funding_source": 2825262454257003.0, "funding_source_details": {"id": "2825262454257003", "type": 1}, "has_migrated_permissions": true, "is_attribution_spec_system_default": true, "is_direct_deals_enabled": false, "is_in_3ds_authorization_enabled_market": false, "is_notifications_enabled": true, "is_personal": 0.0, 
"is_prepay_account": false, "is_tax_id_required": false, "min_campaign_group_spend_cap": 10000.0, "min_daily_budget": 100.0, "name": "Airbyte", "offsite_pixels_tos_accepted": true, "owner": 1506473679510495.0, "rf_spec": {"min_reach_limits": {"US": 200000, "CA": 200000, "GB": 200000, "AR": 200000, "AU": 200000, "AT": 200000, "BE": 200000, "BR": 200000, "CL": 200000, "CN": 200000, "CO": 200000, "HR": 200000, "DK": 200000, "DO": 200000, "EG": 200000, "FI": 200000, "FR": 200000, "DE": 200000, "GR": 200000, "HK": 200000, "IN": 200000, "ID": 200000, "IE": 200000, "IL": 200000, "IT": 200000, "JP": 200000, "JO": 200000, "KW": 200000, "LB": 200000, "MY": 200000, "MX": 200000, "NL": 200000, "NZ": 200000, "NG": 200000, "NO": 200000, "PK": 200000, "PA": 200000, "PE": 200000, "PH": 200000, "PL": 200000, "RU": 200000, "SA": 200000, "RS": 200000, "SG": 200000, "ZA": 200000, "KR": 200000, "ES": 200000, "SE": 200000, "CH": 200000, "TW": 200000, "TH": 200000, "TR": 200000, "AE": 200000, "VE": 200000, "PT": 200000, "LU": 200000, "BG": 200000, "CZ": 200000, "SI": 200000, "IS": 200000, "SK": 200000, "LT": 200000, "TT": 200000, "BD": 200000, "LK": 200000, "KE": 200000, "HU": 200000, "MA": 200000, "CY": 200000, "JM": 200000, "EC": 200000, "RO": 200000, "BO": 200000, "GT": 200000, "CR": 200000, "QA": 200000, "SV": 200000, "HN": 200000, "NI": 200000, "PY": 200000, "UY": 200000, "PR": 200000, "BA": 200000, "PS": 200000, "TN": 200000, "BH": 200000, "VN": 200000, "GH": 200000, "MU": 200000, "UA": 200000, "MT": 200000, "BS": 200000, "MV": 200000, "OM": 200000, "MK": 200000, "LV": 200000, "EE": 200000, "IQ": 200000, "DZ": 200000, "AL": 200000, "NP": 200000, "MO": 200000, "ME": 200000, "SN": 200000, "GE": 200000, "BN": 200000, "UG": 200000, "GP": 200000, "BB": 200000, "AZ": 200000, "TZ": 200000, "LY": 200000, "MQ": 200000, "CM": 200000, "BW": 200000, "ET": 200000, "KZ": 200000, "NA": 200000, "MG": 200000, "NC": 200000, "MD": 200000, "FJ": 200000, "BY": 200000, "JE": 200000, "GU": 200000, "YE": 200000, "ZM": 200000, "IM": 200000, "HT": 200000, "KH": 200000, "AW": 200000, "PF": 200000, "AF": 200000, "BM": 200000, "GY": 200000, "AM": 200000, "MW": 200000, "AG": 200000, "RW": 200000, "GG": 200000, "GM": 200000, "FO": 200000, "LC": 200000, "KY": 200000, "BJ": 200000, "AD": 200000, "GD": 200000, "VI": 200000, "BZ": 200000, "VC": 200000, "MN": 200000, "MZ": 200000, "ML": 200000, "AO": 200000, "GF": 200000, "UZ": 200000, "DJ": 200000, "BF": 200000, "MC": 200000, "TG": 200000, "GL": 200000, "GA": 200000, "GI": 200000, "CD": 200000, "KG": 200000, "PG": 200000, "BT": 200000, "KN": 200000, "SZ": 200000, "LS": 200000, "LA": 200000, "LI": 200000, "MP": 200000, "SR": 200000, "SC": 200000, "VG": 200000, "TC": 200000, "DM": 200000, "MR": 200000, "AX": 200000, "SM": 200000, "SL": 200000, "NE": 200000, "CG": 200000, "AI": 200000, "YT": 200000, "CV": 200000, "GN": 200000, "TM": 200000, "BI": 200000, "TJ": 200000, "VU": 200000, "SB": 200000, "ER": 200000, "WS": 200000, "AS": 200000, "FK": 200000, "GQ": 200000, "TO": 200000, "KM": 200000, "PW": 200000, "FM": 200000, "CF": 200000, "SO": 200000, "MH": 200000, "VA": 200000, "TD": 200000, "KI": 200000, "ST": 200000, "TV": 200000, "NR": 200000, "RE": 200000, "LR": 200000, "ZW": 200000, "CI": 200000, "MM": 200000, "AN": 200000, "AQ": 200000, "BQ": 200000, "BV": 200000, "IO": 200000, "CX": 200000, "CC": 200000, "CK": 200000, "CW": 200000, "TF": 200000, "GW": 200000, "HM": 200000, "XK": 200000, "MS": 200000, "NU": 200000, "NF": 200000, "PN": 200000, "BL": 200000, "SH": 200000, "MF": 200000, "PM": 
200000, "SX": 200000, "GS": 200000, "SS": 200000, "SJ": 200000, "TL": 200000, "TK": 200000, "UM": 200000, "WF": 200000, "EH": 200000}, "countries": ["US", "CA", "GB", "AR", "AU", "AT", "BE", "BR", "CL", "CN", "CO", "HR", "DK", "DO", "EG", "FI", "FR", "DE", "GR", "HK", "IN", "ID", "IE", "IL", "IT", "JP", "JO", "KW", "LB", "MY", "MX", "NL", "NZ", "NG", "NO", "PK", "PA", "PE", "PH", "PL", "RU", "SA", "RS", "SG", "ZA", "KR", "ES", "SE", "CH", "TW", "TH", "TR", "AE", "VE", "PT", "LU", "BG", "CZ", "SI", "IS", "SK", "LT", "TT", "BD", "LK", "KE", "HU", "MA", "CY", "JM", "EC", "RO", "BO", "GT", "CR", "QA", "SV", "HN", "NI", "PY", "UY", "PR", "BA", "PS", "TN", "BH", "VN", "GH", "MU", "UA", "MT", "BS", "MV", "OM", "MK", "EE", "LV", "IQ", "DZ", "AL", "NP", "MO", "ME", "SN", "GE", "BN", "UG", "GP", "BB", "ZW", "CI", "AZ", "TZ", "LY", "MQ", "MM", "CM", "BW", "ET", "KZ", "NA", "MG", "NC", "MD", "FJ", "BY", "JE", "GU", "YE", "ZM", "IM", "HT", "KH", "AW", "PF", "AF", "BM", "GY", "AM", "MW", "AG", "RW", "GG", "GM", "FO", "LC", "KY", "BJ", "AD", "GD", "VI", "BZ", "VC", "MN", "MZ", "ML", "AO", "GF", "UZ", "DJ", "BF", "MC", "TG", "GL", "GA", "GI", "CD", "KG", "PG", "BT", "KN", "SZ", "LS", "LA", "LI", "MP", "SR", "SC", "VG", "TC", "DM", "MR", "AX", "SM", "SL", "NE", "CG", "AI", "YT", "LR", "CV", "GN", "TM", "BI", "TJ", "VU", "SB", "ER", "WS", "AS", "FK", "GQ", "TO", "KM", "PW", "FM", "CF", "SO", "MH", "VA", "TD", "KI", "ST", "TV", "NR", "RE", "AN", "AQ", "BQ", "BV", "IO", "CX", "CC", "CK", "CW", "TF", "GW", "HM", "XK", "MS", "NU", "NF", "PN", "BL", "SH", "MF", "PM", "SX", "GS", "SS", "SJ", "TL", "TK", "UM", "WF", "EH"], "min_campaign_duration": {"US": 1, "CA": 1, "GB": 1, "AR": 1, "AU": 1, "AT": 1, "BE": 1, "BR": 1, "CL": 1, "CN": 1, "CO": 1, "HR": 1, "DK": 1, "DO": 1, "EG": 1, "FI": 1, "FR": 1, "DE": 1, "GR": 1, "HK": 1, "IN": 1, "ID": 1, "IE": 1, "IL": 1, "IT": 1, "JP": 1, "JO": 1, "KW": 1, "LB": 1, "MY": 1, "MX": 1, "NL": 1, "NZ": 1, "NG": 1, "NO": 1, "PK": 1, "PA": 1, "PE": 1, "PH": 1, "PL": 1, "RU": 1, "SA": 1, "RS": 1, "SG": 1, "ZA": 1, "KR": 1, "ES": 1, "SE": 1, "CH": 1, "TW": 1, "TH": 1, "TR": 1, "AE": 1, "VE": 1, "PT": 1, "LU": 1, "BG": 1, "CZ": 1, "SI": 1, "IS": 1, "SK": 1, "LT": 1, "TT": 1, "BD": 1, "LK": 1, "KE": 1, "HU": 1, "MA": 1, "CY": 1, "JM": 1, "EC": 1, "RO": 1, "BO": 1, "GT": 1, "CR": 1, "QA": 1, "SV": 1, "HN": 1, "NI": 1, "PY": 1, "UY": 1, "PR": 1, "BA": 1, "PS": 1, "TN": 1, "BH": 1, "VN": 1, "GH": 1, "MU": 1, "UA": 1, "MT": 1, "BS": 1, "MV": 1, "OM": 1, "MK": 1, "LV": 1, "EE": 1, "IQ": 1, "DZ": 1, "AL": 1, "NP": 1, "MO": 1, "ME": 1, "SN": 1, "GE": 1, "BN": 1, "UG": 1, "GP": 1, "BB": 1, "AZ": 1, "TZ": 1, "LY": 1, "MQ": 1, "CM": 1, "BW": 1, "ET": 1, "KZ": 1, "NA": 1, "MG": 1, "NC": 1, "MD": 1, "FJ": 1, "BY": 1, "JE": 1, "GU": 1, "YE": 1, "ZM": 1, "IM": 1, "HT": 1, "KH": 1, "AW": 1, "PF": 1, "AF": 1, "BM": 1, "GY": 1, "AM": 1, "MW": 1, "AG": 1, "RW": 1, "GG": 1, "GM": 1, "FO": 1, "LC": 1, "KY": 1, "BJ": 1, "AD": 1, "GD": 1, "VI": 1, "BZ": 1, "VC": 1, "MN": 1, "MZ": 1, "ML": 1, "AO": 1, "GF": 1, "UZ": 1, "DJ": 1, "BF": 1, "MC": 1, "TG": 1, "GL": 1, "GA": 1, "GI": 1, "CD": 1, "KG": 1, "PG": 1, "BT": 1, "KN": 1, "SZ": 1, "LS": 1, "LA": 1, "LI": 1, "MP": 1, "SR": 1, "SC": 1, "VG": 1, "TC": 1, "DM": 1, "MR": 1, "AX": 1, "SM": 1, "SL": 1, "NE": 1, "CG": 1, "AI": 1, "YT": 1, "CV": 1, "GN": 1, "TM": 1, "BI": 1, "TJ": 1, "VU": 1, "SB": 1, "ER": 1, "WS": 1, "AS": 1, "FK": 1, "GQ": 1, "TO": 1, "KM": 1, "PW": 1, "FM": 1, "CF": 1, "SO": 1, "MH": 1, "VA": 1, "TD": 1, "KI": 1, "ST": 1, "TV": 1, "NR": 1, 
"RE": 1, "LR": 1, "ZW": 1, "CI": 1, "MM": 1, "AN": 1, "AQ": 1, "BQ": 1, "BV": 1, "IO": 1, "CX": 1, "CC": 1, "CK": 1, "CW": 1, "TF": 1, "GW": 1, "HM": 1, "XK": 1, "MS": 1, "NU": 1, "NF": 1, "PN": 1, "BL": 1, "SH": 1, "MF": 1, "PM": 1, "SX": 1, "GS": 1, "SS": 1, "SJ": 1, "TL": 1, "TK": 1, "UM": 1, "WF": 1, "EH": 1}, "max_campaign_duration": {"US": 90, "CA": 90, "GB": 90, "AR": 90, "AU": 90, "AT": 90, "BE": 90, "BR": 90, "CL": 90, "CN": 90, "CO": 90, "HR": 90, "DK": 90, "DO": 90, "EG": 90, "FI": 90, "FR": 90, "DE": 90, "GR": 90, "HK": 90, "IN": 90, "ID": 90, "IE": 90, "IL": 90, "IT": 90, "JP": 90, "JO": 90, "KW": 90, "LB": 90, "MY": 90, "MX": 90, "NL": 90, "NZ": 90, "NG": 90, "NO": 90, "PK": 90, "PA": 90, "PE": 90, "PH": 90, "PL": 90, "RU": 90, "SA": 90, "RS": 90, "SG": 90, "ZA": 90, "KR": 90, "ES": 90, "SE": 90, "CH": 90, "TW": 90, "TH": 90, "TR": 90, "AE": 90, "VE": 90, "PT": 90, "LU": 90, "BG": 90, "CZ": 90, "SI": 90, "IS": 90, "SK": 90, "LT": 90, "TT": 90, "BD": 90, "LK": 90, "KE": 90, "HU": 90, "MA": 90, "CY": 90, "JM": 90, "EC": 90, "RO": 90, "BO": 90, "GT": 90, "CR": 90, "QA": 90, "SV": 90, "HN": 90, "NI": 90, "PY": 90, "UY": 90, "PR": 90, "BA": 90, "PS": 90, "TN": 90, "BH": 90, "VN": 90, "GH": 90, "MU": 90, "UA": 90, "MT": 90, "BS": 90, "MV": 90, "OM": 90, "MK": 90, "LV": 90, "EE": 90, "IQ": 90, "DZ": 90, "AL": 90, "NP": 90, "MO": 90, "ME": 90, "SN": 90, "GE": 90, "BN": 90, "UG": 90, "GP": 90, "BB": 90, "AZ": 90, "TZ": 90, "LY": 90, "MQ": 90, "CM": 90, "BW": 90, "ET": 90, "KZ": 90, "NA": 90, "MG": 90, "NC": 90, "MD": 90, "FJ": 90, "BY": 90, "JE": 90, "GU": 90, "YE": 90, "ZM": 90, "IM": 90, "HT": 90, "KH": 90, "AW": 90, "PF": 90, "AF": 90, "BM": 90, "GY": 90, "AM": 90, "MW": 90, "AG": 90, "RW": 90, "GG": 90, "GM": 90, "FO": 90, "LC": 90, "KY": 90, "BJ": 90, "AD": 90, "GD": 90, "VI": 90, "BZ": 90, "VC": 90, "MN": 90, "MZ": 90, "ML": 90, "AO": 90, "GF": 90, "UZ": 90, "DJ": 90, "BF": 90, "MC": 90, "TG": 90, "GL": 90, "GA": 90, "GI": 90, "CD": 90, "KG": 90, "PG": 90, "BT": 90, "KN": 90, "SZ": 90, "LS": 90, "LA": 90, "LI": 90, "MP": 90, "SR": 90, "SC": 90, "VG": 90, "TC": 90, "DM": 90, "MR": 90, "AX": 90, "SM": 90, "SL": 90, "NE": 90, "CG": 90, "AI": 90, "YT": 90, "CV": 90, "GN": 90, "TM": 90, "BI": 90, "TJ": 90, "VU": 90, "SB": 90, "ER": 90, "WS": 90, "AS": 90, "FK": 90, "GQ": 90, "TO": 90, "KM": 90, "PW": 90, "FM": 90, "CF": 90, "SO": 90, "MH": 90, "VA": 90, "TD": 90, "KI": 90, "ST": 90, "TV": 90, "NR": 90, "RE": 90, "LR": 90, "ZW": 90, "CI": 90, "MM": 90, "AN": 90, "AQ": 90, "BQ": 90, "BV": 90, "IO": 90, "CX": 90, "CC": 90, "CK": 90, "CW": 90, "TF": 90, "GW": 90, "HM": 90, "XK": 90, "MS": 90, "NU": 90, "NF": 90, "PN": 90, "BL": 90, "SH": 90, "MF": 90, "PM": 90, "SX": 90, "GS": 90, "SS": 90, "SJ": 90, "TL": 90, "TK": 90, "UM": 90, "WF": 90, "EH": 90}, "max_days_to_finish": {"US": 180, "CA": 180, "GB": 180, "AR": 180, "AU": 180, "AT": 180, "BE": 180, "BR": 180, "CL": 180, "CN": 180, "CO": 180, "HR": 180, "DK": 180, "DO": 180, "EG": 180, "FI": 180, "FR": 180, "DE": 180, "GR": 180, "HK": 180, "IN": 180, "ID": 180, "IE": 180, "IL": 180, "IT": 180, "JP": 180, "JO": 180, "KW": 180, "LB": 180, "MY": 180, "MX": 180, "NL": 180, "NZ": 180, "NG": 180, "NO": 180, "PK": 180, "PA": 180, "PE": 180, "PH": 180, "PL": 180, "RU": 180, "SA": 180, "RS": 180, "SG": 180, "ZA": 180, "KR": 180, "ES": 180, "SE": 180, "CH": 180, "TW": 180, "TH": 180, "TR": 180, "AE": 180, "VE": 180, "PT": 180, "LU": 180, "BG": 180, "CZ": 180, "SI": 180, "IS": 180, "SK": 180, "LT": 180, "TT": 180, "BD": 180, "LK": 180, "KE": 180, 
"HU": 180, "MA": 180, "CY": 180, "JM": 180, "EC": 180, "RO": 180, "BO": 180, "GT": 180, "CR": 180, "QA": 180, "SV": 180, "HN": 180, "NI": 180, "PY": 180, "UY": 180, "PR": 180, "BA": 180, "PS": 180, "TN": 180, "BH": 180, "VN": 180, "GH": 180, "MU": 180, "UA": 180, "MT": 180, "BS": 180, "MV": 180, "OM": 180, "MK": 180, "LV": 180, "EE": 180, "IQ": 180, "DZ": 180, "AL": 180, "NP": 180, "MO": 180, "ME": 180, "SN": 180, "GE": 180, "BN": 180, "UG": 180, "GP": 180, "BB": 180, "AZ": 180, "TZ": 180, "LY": 180, "MQ": 180, "CM": 180, "BW": 180, "ET": 180, "KZ": 180, "NA": 180, "MG": 180, "NC": 180, "MD": 180, "FJ": 180, "BY": 180, "JE": 180, "GU": 180, "YE": 180, "ZM": 180, "IM": 180, "HT": 180, "KH": 180, "AW": 180, "PF": 180, "AF": 180, "BM": 180, "GY": 180, "AM": 180, "MW": 180, "AG": 180, "RW": 180, "GG": 180, "GM": 180, "FO": 180, "LC": 180, "KY": 180, "BJ": 180, "AD": 180, "GD": 180, "VI": 180, "BZ": 180, "VC": 180, "MN": 180, "MZ": 180, "ML": 180, "AO": 180, "GF": 180, "UZ": 180, "DJ": 180, "BF": 180, "MC": 180, "TG": 180, "GL": 180, "GA": 180, "GI": 180, "CD": 180, "KG": 180, "PG": 180, "BT": 180, "KN": 180, "SZ": 180, "LS": 180, "LA": 180, "LI": 180, "MP": 180, "SR": 180, "SC": 180, "VG": 180, "TC": 180, "DM": 180, "MR": 180, "AX": 180, "SM": 180, "SL": 180, "NE": 180, "CG": 180, "AI": 180, "YT": 180, "CV": 180, "GN": 180, "TM": 180, "BI": 180, "TJ": 180, "VU": 180, "SB": 180, "ER": 180, "WS": 180, "AS": 180, "FK": 180, "GQ": 180, "TO": 180, "KM": 180, "PW": 180, "FM": 180, "CF": 180, "SO": 180, "MH": 180, "VA": 180, "TD": 180, "KI": 180, "ST": 180, "TV": 180, "NR": 180, "RE": 180, "LR": 180, "ZW": 180, "CI": 180, "MM": 180, "AN": 180, "AQ": 180, "BQ": 180, "BV": 180, "IO": 180, "CX": 180, "CC": 180, "CK": 180, "CW": 180, "TF": 180, "GW": 180, "HM": 180, "XK": 180, "MS": 180, "NU": 180, "NF": 180, "PN": 180, "BL": 180, "SH": 180, "MF": 180, "PM": 180, "SX": 180, "GS": 180, "SS": 180, "SJ": 180, "TL": 180, "TK": 180, "UM": 180, "WF": 180, "EH": 180}, "global_io_max_campaign_duration": 100}, "spend_cap": "0", "tax_id_status": 0.0, "tax_id_type": "0", "timezone_id": 1.0, "timezone_name": "America/Los_Angeles", "timezone_offset_hours_utc": -8.0, "tos_accepted": {"web_custom_audience_tos": 1}, "user_tasks": ["DRAFT", "ANALYZE", "ADVERTISE", "MANAGE"]}, "emitted_at": 1708020062150} {"stream": "ads", "data": {"id": "23853620229650398", "bid_type": "ABSOLUTE_OCPM", "account_id": "212551616838260", "campaign_id": "23853619670350398", "adset_id": "23853619670380398", "status": "ACTIVE", "creative": {"id": "23853666124230398"}, "updated_time": "2023-03-21T22:41:46-0700", "created_time": "2023-03-17T08:04:31-0700", "name": "With The Highest Standard for Reliability", "targeting": {"age_max": 60, "age_min": 18, "custom_audiences": [{"id": "23853630753300398", "name": "Lookalike (US, 10%) - Airbyte Cloud Users"}, {"id": "23853683587660398", "name": "Web Traffic [ALL] - _copy"}], "geo_locations": {"countries": ["US"], "location_types": ["home", "recent"]}, "brand_safety_content_filter_levels": ["FACEBOOK_STANDARD", "AN_STANDARD"], "targeting_relaxation_types": {"lookalike": 1, "custom_audience": 1}, "publisher_platforms": ["facebook", "instagram", "audience_network", "messenger"], "facebook_positions": ["feed", "biz_disco_feed", "facebook_reels", "facebook_reels_overlay", "right_hand_column", "video_feeds", "instant_article", "instream_video", "marketplace", "story", "search"], "instagram_positions": ["stream", "story", "explore", "reels", "shop", "explore_home", "profile_feed"], "device_platforms": 
["mobile", "desktop"], "messenger_positions": ["story"], "audience_network_positions": ["classic", "instream_video", "rewarded_video"]}, "effective_status": "ACTIVE", "last_updated_by_app_id": "119211728144504", "source_ad_id": "0", "tracking_specs": [{"action.type": ["offsite_conversion"], "fb_pixel": ["917042523049733"]}, {"action.type": ["link_click"], "post": ["662226902575095"], "post.wall": ["112704783733939"]}, {"action.type": ["post_engagement"], "page": ["112704783733939"], "post": ["662226902575095"]}], "conversion_specs": [{"action.type": ["offsite_conversion"], "conversion_id": ["6015304265216283"]}]}, "emitted_at": 1707135365030} {"stream": "ad_sets", "data": {"id": "23853619670380398", "name": "Lookalike audience_Free Connector Program", "promoted_object": {"pixel_id": "917042523049733", "custom_event_type": "COMPLETE_REGISTRATION"}, "account_id": "212551616838260", "updated_time": "2023-03-21T14:20:51-0700", "daily_budget": 2000.0, "budget_remaining": 2000.0, "effective_status": "ACTIVE", "campaign_id": "23853619670350398", "created_time": "2023-03-17T08:04:28-0700", "start_time": "2023-03-17T08:04:28-0700", "lifetime_budget": 0.0, "targeting": {"age_max": 60, "age_min": 18, "custom_audiences": [{"id": "23853630753300398", "name": "Lookalike (US, 10%) - Airbyte Cloud Users"}, {"id": "23853683587660398", "name": "Web Traffic [ALL] - _copy"}], "geo_locations": {"countries": ["US"], "location_types": ["home", "recent"]}, "brand_safety_content_filter_levels": ["FACEBOOK_STANDARD", "AN_STANDARD"], "targeting_relaxation_types": {"lookalike": 1, "custom_audience": 1}, "publisher_platforms": ["facebook", "instagram", "audience_network", "messenger"], "facebook_positions": ["feed", "biz_disco_feed", "facebook_reels", "facebook_reels_overlay", "right_hand_column", "video_feeds", "instant_article", "instream_video", "marketplace", "story", "search"], "instagram_positions": ["stream", "story", "explore", "reels", "shop", "explore_home", "profile_feed"], "device_platforms": ["mobile", "desktop"], "messenger_positions": ["story"], "audience_network_positions": ["classic", "instream_video", "rewarded_video"]}, "bid_strategy": "LOWEST_COST_WITHOUT_CAP"}, "emitted_at": 1707135364623} -{"stream": "campaigns", "data": {"id": "23846542053890398", "account_id": "212551616838260", "budget_rebalance_flag": false, "budget_remaining": 0.0,"buying_type": "AUCTION", "created_time": "2021-01-18T21:36:42-0800", "configured_status": "PAUSED", "effective_status": "PAUSED", "name": "Fake Campaign 0", "objective": "MESSAGES", "smart_promotion_type": "GUIDED_CREATION", "source_campaign_id": 0.0, "special_ad_category": "NONE", "start_time": "1969-12-31T15:59:59-0800", "status": "PAUSED", "updated_time": "2021-02-18T01:00:02-0800"}, "emitted_at": 1694795155769} +{"stream":"campaigns","data":{"id":"23846542053890398","account_id":"212551616838260","budget_rebalance_flag":false,"budget_remaining":0.0,"buying_type":"AUCTION","created_time":"2021-01-18T21:36:42-0800","configured_status":"PAUSED","effective_status":"PAUSED","name":"Fake Campaign 0","objective":"MESSAGES","smart_promotion_type":"GUIDED_CREATION","source_campaign_id":0.0,"special_ad_category":"NONE","start_time":"1969-12-31T15:59:59-0800","status":"PAUSED","updated_time":"2021-02-18T01:00:02-0800"},"emitted_at":1694795155769} {"stream": "custom_audiences", "data": {"id": "23853683587660398", "account_id": "212551616838260", "approximate_count_lower_bound": 4700, "approximate_count_upper_bound": 5500, "customer_file_source": "PARTNER_PROVIDED_ONLY", 
"data_source": {"type": "UNKNOWN", "sub_type": "ANYTHING", "creation_params": "[]"}, "delivery_status": {"code": 200, "description": "This audience is ready for use."}, "description": "Custom Audience-Web Traffic [ALL] - _copy", "is_value_based": false, "name": "Web Traffic [ALL] - _copy", "operation_status": {"code": 200, "description": "Normal"}, "permission_for_actions": {"can_edit": true, "can_see_insight": "True", "can_share": "True", "subtype_supports_lookalike": "True", "supports_recipient_lookalike": "False"}, "retention_days": 0, "subtype": "CUSTOM", "time_content_updated": 1679433484, "time_created": 1679433479, "time_updated": 1679433484}, "emitted_at": 1698925454024} {"stream": "ad_creatives", "data": {"id": "23853630774830398", "body": "Until a connector meets our GA reliability standards, you don't pay for it.", "image_url": "https://scontent.fiev6-1.fna.fbcdn.net/v/t45.1600-4/333773383_23853620180320398_4214441850420455541_n.png?_nc_cat=109&ccb=1-7&_nc_sid=c0a1f7&_nc_ohc=qbTWMi-gWi8AX8hFZLQ&_nc_ht=scontent.fiev6-1.fna&edm=ALjApogEAAAA&oh=00_AfC9KndALRjbR5Z4Xz_ZytJTb9rsS_S4_SDvmiegih69vQ&oe=65C8B50F", "account_id": "212551616838260", "actor_id": "112704783733939", "asset_feed_spec": {"bodies": [{"text": "Until a connector meets our GA reliability standards, you don't pay for it."}, {"text": "Reliability is the cornerstone of having an ELT tool you trust."}, {"text": "Don't compromise between cost and connector reliability."}, {"text": "Limitless data movement with free Alpha and Beta connectors"}], "descriptions": [{"text": "Until a connector meets our GA reliability standards, you don't pay for it. "}], "titles": [{"text": "Introducing: our free connector program"}], "optimization_type": "DEGREES_OF_FREEDOM"}, "call_to_action_type": "SIGN_UP", "effective_instagram_story_id": "5605802859523550", "effective_object_story_id": "112704783733939_660115876119531", "title": "Introducing: our free connector program", "name": "Introducing: our free connector program 2023-03-17-ccf7ed52a98e5e699299861a8a323194", "instagram_actor_id": "2185696824778148", "instagram_permalink_url": "https://www.instagram.com/p/Cp5PgWrjU8V/", "object_story_spec": {"page_id": "112704783733939", "instagram_actor_id": "2185696824778148", "link_data": {"link": "https://airbyte.com/free-connector-program?utm_medium=paid_social&utm_source=facebook&utm_campaign=q1_freeconnectorprogram_t", "image_hash": "970937d2f16de20c0a99e598aa876ac0", "call_to_action": {"type": "SIGN_UP"}}}, "object_type": "SHARE", "status": "ACTIVE", "thumbnail_url": "https://external.fiev6-1.fna.fbcdn.net/emg1/v/t13/8568826884261823966?url=https%3A%2F%2Fwww.facebook.com%2Fads%2Fimage%2F%3Fd%3DAQL3nBsTZ0CoQ_uD_vAVwqZKjwi7X3zsqa8EbE4S1aY7w8cjJ7x6BihYqZkQTgC3BzwY5Y_dxv11UvkOL0cMER5tPch9x6_Q2p3xtHYED2DHLT6v9o9CnYB8S5FMSQ91vMBQCbLFVHh_bSr0OT_4bW4V&fb_obo=1&utld=facebook.com&stp=c0.5000x0.5000f_dst-emg0_p64x64_q75&ccb=13-1&oh=06_AbE-j6xf-dGVCh9dJcOJdFM5v4Sydw74rDQJWynPZayneA&oe=65C511DE&_nc_sid=58080a", "image_hash": "970937d2f16de20c0a99e598aa876ac0"}, "emitted_at": 1707288372517} -{"stream": "activities", "data": {"account_id": "212551616838260", "actor_id": "122043039268043192", "actor_name": "Payments RTU Processor", "application_id": "0", "date_time_in_timezone": "03/13/2023 at 6:30 AM", "event_time": "2023-03-13T13:30:47+0000", "event_type": "ad_account_billing_charge", "extra_data": "{\"currency\":\"USD\",\"new_value\":1188,\"transaction_id\":\"5885578541558696-11785530\",\"action\":67,\"type\":\"payment_amount\"}", "object_id": 
"212551616838260", "object_name": "Airbyte", "object_type": "ACCOUNT", "translated_event_type": "Account billed"}, "emitted_at": 1696931251153} -{"stream": "custom_conversions", "data": {"id": "694166388077667", "account_id": "212551616838260", "creation_time": "2020-04-22T01:36:00+0000", "custom_event_type": "CONTACT", "data_sources": [{"id": "2667253716886462", "source_type": "PIXEL", "name": "Dataline's Pixel"}], "default_conversion_value": 0, "event_source_type": "pixel", "is_archived": true, "is_unavailable": false, "name": "SubscribedButtonClick", "retention_days": 0, "rule": "{\"and\":[{\"event\":{\"eq\":\"PageView\"}},{\"or\":[{\"URL\":{\"i_contains\":\"SubscribedButtonClick\"}}]}]}"}, "emitted_at": 1692180839174} -{"stream": "images", "data": {"id": "212551616838260:c1e94a8768a405f0f212d71fe8336647", "account_id": "212551616838260", "name": "Audience_1_Ad_3_1200x1200_blue_CTA_arrow.png_105", "creatives": ["23853630775340398", "23853630871360398", "23853666124200398"], "original_height": 1200, "original_width": 1200, "permalink_url": "https://www.facebook.com/ads/image/?d=AQIDNjjLb7VzVJ26jXb_HpudCEUJqbV_lLF2JVsdruDcBxnXQEKfzzd21VVJnkm0B-JLosUXNNg1BH78y7FxnK3AH-0D_lnk7kn39_bIcOMK7Z9HYyFInfsVY__adup3A5zGTIcHC9Y98Je5qK-yD8F6", "status": "ACTIVE", "url": "https://scontent-dus1-1.xx.fbcdn.net/v/t45.1600-4/335907140_23853620220420398_4375584095210967511_n.png?_nc_cat=104&ccb=1-7&_nc_sid=2aac32&_nc_ohc=xdjrPpbRGNAAX8Dck01&_nc_ht=scontent-dus1-1.xx&edm=AJcBmwoEAAAA&oh=00_AfDCqQ6viqrgLcfbO3O5-n030Usq7Zyt2c1TmsatqnYf7Q&oe=64E2779A", "created_time": "2023-03-16T13:13:17-0700", "hash": "c1e94a8768a405f0f212d71fe8336647", "url_128": "https://scontent-dus1-1.xx.fbcdn.net/v/t45.1600-4/335907140_23853620220420398_4375584095210967511_n.png?stp=dst-png_s128x128&_nc_cat=104&ccb=1-7&_nc_sid=2aac32&_nc_ohc=xdjrPpbRGNAAX8Dck01&_nc_ht=scontent-dus1-1.xx&edm=AJcBmwoEAAAA&oh=00_AfAY50CMpox2s4w_f18IVx7sZuXlg4quF6YNIJJ8D4PZew&oe=64E2779A", "is_associated_creatives_in_adgroups": true, "updated_time": "2023-03-17T08:09:56-0700", "height": 1200, "width": 1200}, "emitted_at": 1692180839582} -{"stream": "ads_insights", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_destination": "244953057175777", "action_target_id": "244953057175777", "action_type": "page_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_destination": "244953057175777", "action_target_id": "244953057175777", "action_type": "post_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_destination": "244953057175777", "action_target_id": "244953057175777", "action_type": "link_click", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}], "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 3, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.007, "cost_per_inline_link_click": 0.396667, "cost_per_inline_post_engagement": 0.396667, "cost_per_unique_click": 0.396667, "cost_per_unique_inline_link_click": 0.396667, "cpc": 0.396667, "cpm": 0.902199, "cpp": 0.948207, "created_time": "2021-02-09", "ctr": 0.227445, "date_start": "2021-02-15", "date_stop": "2021-02-15", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recall_rate": 13.545817, "estimated_ad_recallers": 
170.0, "frequency": 1.050996, "impressions": 1319, "inline_link_click_ctr": 0.227445, "inline_link_clicks": 3, "inline_post_engagement": 3, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_destination": "244953057175777", "action_target_id": "244953057175777", "action_type": "outbound_click", "value": 3.0}], "quality_ranking": "UNKNOWN", "reach": 1255, "social_spend": 0.0, "spend": 1.19, "unique_actions": [{"action_destination": "244953057175777", "action_target_id": "244953057175777", "action_type": "page_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_destination": "244953057175777", "action_target_id": "244953057175777", "action_type": "post_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_destination": "244953057175777", "action_target_id": "244953057175777", "action_type": "link_click", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}], "unique_clicks": 3, "unique_ctr": 0.239044, "unique_inline_link_click_ctr": 0.239044, "unique_inline_link_clicks": 3, "unique_link_clicks_ctr": 0.239044, "unique_outbound_clicks": [{"action_destination": "244953057175777", "action_target_id": "244953057175777", "action_type": "outbound_click", "value": 3.0}], "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "website_ctr": [{"action_type": "link_click", "value": 0.227445}], "wish_bid": 0.0}, "emitted_at": 1682686057366} -{"stream": "ads_insights_action_carousel_card", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 3, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.007, "cost_per_inline_link_click": 0.396667, "cost_per_inline_post_engagement": 0.396667, "cost_per_unique_click": 0.396667, "cost_per_unique_inline_link_click": 0.396667, "cpc": 0.396667, "cpm": 0.902199, "cpp": 0.948207, "created_time": "2021-02-09", "ctr": 0.227445, "date_start": "2021-02-15", "date_stop": "2021-02-15", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recall_rate": 13.545817, "estimated_ad_recallers": 170.0, "frequency": 1.050996, "impressions": 1319, "inline_link_click_ctr": 0.227445, "inline_post_engagement": 3, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 1255, "social_spend": 0.0, "spend": 1.19, "unique_actions": [{"action_type": "page_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_type": "post_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_type": "link_click", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}], "unique_clicks": 3, "unique_ctr": 0.239044, "unique_inline_link_click_ctr": 0.239044, "unique_inline_link_clicks": 3, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "website_ctr": [{"action_type": "link_click", "value": 0.227445}], "wish_bid": 0.0}, "emitted_at": 1692180857757} -{"stream": "ads_insights_action_conversion_device", "data": {"account_currency": "USD", "account_id": "212551616838260", 
"account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.004, "cpm": 0.754717, "cpp": 0.784314, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recall_rate": 19.607843, "estimated_ad_recallers": 10.0, "frequency": 1.039216, "impressions": 53, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 51, "spend": 0.04, "unique_clicks": 0, "updated_time": "2021-08-27", "device_platform": "desktop"}, "emitted_at": 1696936270620} -{"stream": "ads_insights_action_reaction", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.008889, "cost_per_unique_click": 0.8, "cpc": 0.8, "cpm": 1.255887, "cpp": 1.296596, "created_time": "2021-02-11", "ctr": 0.156986, "date_start": "2021-02-14", "date_stop": "2021-02-14", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recall_rate": 14.58671, "estimated_ad_recallers": 90.0, "frequency": 1.032415, "impressions": 637, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 617, "social_spend": 0.0, "spend": 0.8, "unique_clicks": 1, "unique_ctr": 0.162075, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1696936287351} -{"stream": "ads_insights_action_type", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.008889, "cost_per_unique_click": 0.8, "cpc": 0.8, "cpm": 1.255887, "cpp": 1.296596, "created_time": "2021-02-11", "ctr": 0.156986, "date_start": "2021-02-14", "date_stop": "2021-02-14", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recall_rate": 14.58671, "estimated_ad_recallers": 90.0, "frequency": 1.032415, "impressions": 637, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 617, "social_spend": 0.0, "spend": 0.8, "unique_clicks": 1, "unique_ctr": 0.162075, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1696936315908} -{"stream": "ads_insights_action_video_sound", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", 
"cost_per_estimated_ad_recallers": 0.008889, "cost_per_unique_click": 0.8, "cpc": 0.8, "cpm": 1.255887, "cpp": 1.296596, "created_time": "2021-02-11", "ctr": 0.156986, "date_start": "2021-02-14", "date_stop": "2021-02-14", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recall_rate": 14.58671, "estimated_ad_recallers": 90.0, "frequency": 1.032415, "impressions": 637, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 617, "social_spend": 0.0, "spend": 0.8, "unique_clicks": 1, "unique_ctr": 0.162075, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1696936296894} -{"stream": "ads_insights_action_video_type", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "conversion_rate_ranking": "UNKNOWN", "cost_per_estimated_ad_recallers": 0.008889, "cost_per_unique_click": 0.8, "cpc": 0.8, "cpm": 1.255887, "cpp": 1.296596, "created_time": "2021-02-11", "ctr": 0.156986, "date_start": "2021-02-14", "date_stop": "2021-02-14", "engagement_rate_ranking": "UNKNOWN", "estimated_ad_recall_rate": 14.58671, "estimated_ad_recallers": 90.0, "frequency": 1.032415, "impressions": 637, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "quality_ranking": "UNKNOWN", "reach": 617, "social_spend": 0.0, "spend": 0.8, "unique_clicks": 1, "unique_ctr": 0.162075, "updated_time": "2021-08-27", "video_play_curve_actions": [{"action_type": "video_view"}], "wish_bid": 0.0}, "emitted_at": 1696936306631} -{"stream": "ads_insights_age_and_gender", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.02, "cpm": 0.869565, "cpp": 0.952381, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recall_rate": 4.761905, "estimated_ad_recallers": 1.0, "frequency": 1.095238, "gender_targeting": "female", "impressions": 23, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 21, "spend": 0.02, "unique_clicks": 0, "updated_time": "2021-08-27", "age": "55-64", "gender": "female"}, "emitted_at": 1696939548058} -{"stream": "ads_insights_country", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "cost_per_estimated_ad_recallers": 0.008889, "cost_per_unique_click": 0.8, "cpc": 0.8, "cpm": 1.255887, "cpp": 1.296596, "created_time": "2021-02-11", "ctr": 0.156986, "date_start": "2021-02-14", "date_stop": "2021-02-14", "estimated_ad_recall_rate": 14.58671, 
"estimated_ad_recallers": 90.0, "frequency": 1.032415, "impressions": 637, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 617, "spend": 0.8, "unique_clicks": 1, "unique_ctr": 0.162075, "updated_time": "2021-08-27", "country": "US"}, "emitted_at": 1696936565587} -{"stream": "ads_insights_delivery_device", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0075, "cpm": 1.630435, "cpp": 1.744186, "created_time": "2021-02-09", "ctr": 0.0, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recall_rate": 23.255814, "estimated_ad_recallers": 20.0, "frequency": 1.069767, "impressions": 92, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 86, "spend": 0.15, "unique_clicks": 0, "updated_time": "2021-08-27", "device_platform": "desktop"}, "emitted_at": 1696936327621} -{"stream": "ads_insights_delivery_platform", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "actions": [{"action_type": "page_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_type": "post_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_type": "link_click", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}], "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 3, "cost_per_action_type": [{"action_type": "post_engagement", "value": 0.39, "1d_click": 0.39, "7d_click": 0.39, "28d_click": 0.39}, {"action_type": "page_engagement", "value": 0.39, "1d_click": 0.39, "7d_click": 0.39, "28d_click": 0.39}, {"action_type": "link_click", "value": 0.39, "1d_click": 0.39, "7d_click": 0.39, "28d_click": 0.39}], "cost_per_estimated_ad_recallers": 0.006882, "cost_per_inline_link_click": 0.39, "cost_per_inline_post_engagement": 0.39, "cost_per_outbound_click": [{"action_type": "outbound_click", "value": 0.39}], "cost_per_unique_action_type": [{"action_type": "post_engagement", "value": 0.39, "1d_click": 0.39, "7d_click": 0.39, "28d_click": 0.39}, {"action_type": "page_engagement", "value": 0.39, "1d_click": 0.39, "7d_click": 0.39, "28d_click": 0.39}, {"action_type": "link_click", "value": 0.39, "1d_click": 0.39, "7d_click": 0.39, "28d_click": 0.39}], "cost_per_unique_click": 0.39, "cost_per_unique_inline_link_click": 0.39, "cost_per_unique_outbound_click": [{"action_type": "outbound_click", "value": 0.39}], "cpc": 0.39, "cpm": 0.922713, "cpp": 0.971761, "created_time": "2021-02-09", "ctr": 0.236593, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recall_rate": 14.119601, "estimated_ad_recallers": 170.0, "frequency": 1.053156, "impressions": 1268, "inline_link_click_ctr": 0.236593, "inline_link_clicks": 3, "inline_post_engagement": 3, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "outbound_clicks": [{"action_type": "outbound_click", "value": 3.0}], 
"outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.236593}], "reach": 1204, "spend": 1.17, "unique_actions": [{"action_type": "page_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_type": "post_engagement", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}, {"action_type": "link_click", "value": 3.0, "1d_click": 3.0, "7d_click": 3.0, "28d_click": 3.0}], "unique_clicks": 3, "unique_ctr": 0.249169, "unique_inline_link_click_ctr": 0.249169, "unique_inline_link_clicks": 3, "unique_link_clicks_ctr": 0.249169, "unique_outbound_clicks": [{"action_type": "outbound_click", "value": 3.0}], "unique_outbound_clicks_ctr": [{"action_type": "outbound_click", "value": 0.249169}], "updated_time": "2021-08-27", "website_ctr": [{"action_type": "link_click", "value": 0.236593}], "publisher_platform": "facebook"}, "emitted_at": 1696936337306} -{"stream": "ads_insights_delivery_platform_and_device_platform", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846765228310398", "ad_name": "Airbyte Ad", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.002, "cpm": 0.392157, "cpp": 0.392157, "created_time": "2021-02-09", "ctr": 0.0, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recall_rate": 19.607843, "estimated_ad_recallers": 10.0, "frequency": 1.0, "impressions": 51, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 51, "spend": 0.02, "unique_clicks": 0, "updated_time": "2021-08-27", "publisher_platform": "instagram", "device_platform": "mobile_app"}, "emitted_at": 1696967644628} -{"stream": "ads_insights_demographics_age", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0085, "cpm": 1.14094, "cpp": 1.188811, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recall_rate": 13.986014, "estimated_ad_recallers": 20.0, "frequency": 1.041958, "impressions": 149, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 143, "spend": 0.17, "unique_clicks": 0, "updated_time": "2021-08-27", "age": "25-34"}, "emitted_at": 1696936389857} -{"stream": "ads_insights_demographics_country", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 1, "cost_per_estimated_ad_recallers": 0.008889, "cost_per_unique_click": 0.8, "cpc": 0.8, "cpm": 1.255887, "cpp": 1.296596, "created_time": "2021-02-11", "ctr": 0.156986, "date_start": "2021-02-14", "date_stop": "2021-02-14", "estimated_ad_recall_rate": 14.58671, "estimated_ad_recallers": 90.0, "frequency": 1.032415, "impressions": 637, "objective": "BRAND_AWARENESS", 
"optimization_goal": "AD_RECALL_LIFT", "reach": 617, "spend": 0.8, "unique_clicks": 1, "unique_ctr": 0.162075, "updated_time": "2021-08-27", "country": "US"}, "emitted_at": 1696936440731} -{"stream": "ads_insights_demographics_dma_region", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 1, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "dma": "Anchorage"}, "emitted_at": 1696936491393} -{"stream": "ads_insights_demographics_gender", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0085, "cpm": 1.268657, "cpp": 1.338583, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-02-14", "date_stop": "2021-02-14", "estimated_ad_recall_rate": 15.748032, "estimated_ad_recallers": 20.0, "frequency": 1.055118, "gender_targeting": "female", "impressions": 134, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 127, "spend": 0.17, "unique_clicks": 0, "updated_time": "2021-08-27", "gender": "female"}, "emitted_at": 1696967753477} -{"stream": "ads_insights_dma", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 1, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 1, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "dma": "West Palm Beach-Ft. 
Pierce"}, "emitted_at": 1696936556045} -{"stream": "ads_insights_platform_and_device", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.0, "cpm": 0.0, "cpp": 0.0, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recall_rate": 12.5, "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 8, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 8, "spend": 0.0, "unique_clicks": 0, "updated_time": "2021-08-27", "publisher_platform": "instagram", "platform_position": "feed", "impression_device": "android_smartphone"}, "emitted_at": 1696936579028} -{"stream": "ads_insights_region", "data": {"account_currency": "USD", "account_id": "212551616838260", "account_name": "Airbyte", "ad_id": "23846784938030398", "ad_name": "Stock photo ad 2", "adset_id": "23846765228280398", "adset_name": "Vanilla awareness ad set", "buying_type": "AUCTION", "campaign_id": "23846765228240398", "campaign_name": "Airbyte Awareness Campaign 1 (sherif)", "clicks": 0, "cost_per_estimated_ad_recallers": 0.02, "cpm": 1.111111, "cpp": 1.111111, "created_time": "2021-02-11", "ctr": 0.0, "date_start": "2021-02-15", "date_stop": "2021-02-15", "estimated_ad_recall_rate": 5.555556, "estimated_ad_recallers": 1.0, "frequency": 1.0, "impressions": 18, "instant_experience_clicks_to_open": 1.0, "instant_experience_clicks_to_start": 1.0, "objective": "BRAND_AWARENESS", "optimization_goal": "AD_RECALL_LIFT", "reach": 18, "spend": 0.02, "unique_clicks": 0, "updated_time": "2021-08-27", "region": "New York"}, "emitted_at": 1696936621899} -{"stream": "customcustom_insight_stream", "data": {"account_id": "212551616838260", "cpc": 0.27, "ad_id": "23846765228310398", "clicks": 1, "account_name": "Airbyte", "date_start": "2021-02-15", "date_stop": "2021-02-15", "gender": "female"}, "emitted_at": 1695385890508} +{"stream":"activities","data":{"account_id":"212551616838260","actor_id":"122043039268043192","actor_name":"Payments RTU Processor","application_id":"0","date_time_in_timezone":"03/13/2023 at 6:30 AM","event_time":"2023-03-13T13:30:47+0000","event_type":"ad_account_billing_charge","extra_data":"{\"currency\":\"USD\",\"new_value\":1188,\"transaction_id\":\"5885578541558696-11785530\",\"action\":67,\"type\":\"payment_amount\"}","object_id":"212551616838260","object_name":"Airbyte","object_type":"ACCOUNT","translated_event_type":"Account billed"},"emitted_at":1696931251153} +{"stream":"custom_conversions","data":{"id":"694166388077667","account_id":"212551616838260","creation_time":"2020-04-22T01:36:00+0000","custom_event_type":"CONTACT","data_sources":[{"id":"2667253716886462","source_type":"PIXEL","name":"Dataline's Pixel"}],"default_conversion_value":0,"event_source_type":"pixel","is_archived":true,"is_unavailable":false,"name":"SubscribedButtonClick","retention_days":0,"rule":"{\"and\":[{\"event\":{\"eq\":\"PageView\"}},{\"or\":[{\"URL\":{\"i_contains\":\"SubscribedButtonClick\"}}]}]}"},"emitted_at":1692180839174} 
+{"stream":"images","data":{"id":"212551616838260:c1e94a8768a405f0f212d71fe8336647","account_id":"212551616838260","name":"Audience_1_Ad_3_1200x1200_blue_CTA_arrow.png_105","creatives":["23853630775340398","23853630871360398","23853666124200398"],"original_height":1200,"original_width":1200,"permalink_url":"https://www.facebook.com/ads/image/?d=AQIDNjjLb7VzVJ26jXb_HpudCEUJqbV_lLF2JVsdruDcBxnXQEKfzzd21VVJnkm0B-JLosUXNNg1BH78y7FxnK3AH-0D_lnk7kn39_bIcOMK7Z9HYyFInfsVY__adup3A5zGTIcHC9Y98Je5qK-yD8F6","status":"ACTIVE","url":"https://scontent-dus1-1.xx.fbcdn.net/v/t45.1600-4/335907140_23853620220420398_4375584095210967511_n.png?_nc_cat=104&ccb=1-7&_nc_sid=2aac32&_nc_ohc=xdjrPpbRGNAAX8Dck01&_nc_ht=scontent-dus1-1.xx&edm=AJcBmwoEAAAA&oh=00_AfDCqQ6viqrgLcfbO3O5-n030Usq7Zyt2c1TmsatqnYf7Q&oe=64E2779A","created_time":"2023-03-16T13:13:17-0700","hash":"c1e94a8768a405f0f212d71fe8336647","url_128":"https://scontent-dus1-1.xx.fbcdn.net/v/t45.1600-4/335907140_23853620220420398_4375584095210967511_n.png?stp=dst-png_s128x128&_nc_cat=104&ccb=1-7&_nc_sid=2aac32&_nc_ohc=xdjrPpbRGNAAX8Dck01&_nc_ht=scontent-dus1-1.xx&edm=AJcBmwoEAAAA&oh=00_AfAY50CMpox2s4w_f18IVx7sZuXlg4quF6YNIJJ8D4PZew&oe=64E2779A","is_associated_creatives_in_adgroups":true,"updated_time":"2023-03-17T08:09:56-0700","height":1200,"width":1200},"emitted_at":1692180839582} +{"stream":"ads_insights","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","actions":[{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 
(sherif)","clicks":3,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.007,"cost_per_inline_link_click":0.396667,"cost_per_inline_post_engagement":0.396667,"cost_per_unique_click":0.396667,"cost_per_unique_inline_link_click":0.396667,"cpc":0.396667,"cpm":0.902199,"cpp":0.948207,"created_time":"2021-02-09","ctr":0.227445,"date_start":"2021-02-15","date_stop":"2021-02-15","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":13.545817,"estimated_ad_recallers":170.0,"frequency":1.050996,"impressions":1319,"inline_link_click_ctr":0.227445,"inline_link_clicks":3,"inline_post_engagement":3,"instant_experience_clicks_to_open":1.0,"instant_experience_clicks_to_start":1.0,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","outbound_clicks":[{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"outbound_click","value":3.0}],"quality_ranking":"UNKNOWN","reach":1255,"social_spend":0.0,"spend":1.19,"unique_actions":[{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"unique_clicks":3,"unique_ctr":0.239044,"unique_inline_link_click_ctr":0.239044,"unique_inline_link_clicks":3,"unique_link_clicks_ctr":0.239044,"unique_outbound_clicks":[{"action_destination":"244953057175777","action_target_id":"244953057175777","action_type":"outbound_click","value":3.0}],"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"website_ctr":[{"action_type":"link_click","value":0.227445}],"wish_bid":0.0},"emitted_at":1682686057366} +{"stream":"ads_insights_action_carousel_card","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 
(sherif)","clicks":3,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.007,"cost_per_inline_link_click":0.396667,"cost_per_inline_post_engagement":0.396667,"cost_per_unique_click":0.396667,"cost_per_unique_inline_link_click":0.396667,"cpc":0.396667,"cpm":0.902199,"cpp":0.948207,"created_time":"2021-02-09","ctr":0.227445,"date_start":"2021-02-15","date_stop":"2021-02-15","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":13.545817,"estimated_ad_recallers":170.0,"frequency":1.050996,"impressions":1319,"inline_link_click_ctr":0.227445,"inline_post_engagement":3,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":1255,"social_spend":0.0,"spend":1.19,"unique_actions":[{"action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"unique_clicks":3,"unique_ctr":0.239044,"unique_inline_link_click_ctr":0.239044,"unique_inline_link_clicks":3,"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"website_ctr":[{"action_type":"link_click","value":0.227445}],"wish_bid":0.0},"emitted_at":1692180857757} +{"stream":"ads_insights_action_conversion_device","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.004,"cpm":0.754717,"cpp":0.784314,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":19.607843,"estimated_ad_recallers":10.0,"frequency":1.039216,"impressions":53,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":51,"spend":0.04,"unique_clicks":0,"updated_time":"2021-08-27","device_platform":"desktop"},"emitted_at":1696936270620} +{"stream":"ads_insights_action_reaction","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":617,"social_spend":0.0,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"wish_bid":0.0},"emitted_at":1696936287351} +{"stream":"ads_insights_action_type","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad 
set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":617,"social_spend":0.0,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"wish_bid":0.0},"emitted_at":1696936315908} +{"stream":"ads_insights_action_video_sound","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":617,"social_spend":0.0,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","video_play_curve_actions": [{"action_type": "video_view"}],"wish_bid":0.0},"emitted_at":1696936296894} +{"stream":"ads_insights_action_video_type","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"conversion_rate_ranking":"UNKNOWN","cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","engagement_rate_ranking":"UNKNOWN","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","quality_ranking":"UNKNOWN","reach":617,"social_spend":0.0,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","video_play_curve_actions":[{"action_type":"video_view"}],"wish_bid":0.0},"emitted_at":1696936306631} +{"stream":"ads_insights_age_and_gender","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 
(sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.02,"cpm":0.869565,"cpp":0.952381,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":4.761905,"estimated_ad_recallers":1.0,"frequency":1.095238,"gender_targeting":"female","impressions":23,"instant_experience_clicks_to_open":1.0,"instant_experience_clicks_to_start":1.0,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":21,"spend":0.02,"unique_clicks":0,"updated_time":"2021-08-27","age":"55-64","gender":"female"},"emitted_at":1696939548058} +{"stream":"ads_insights_country","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"instant_experience_clicks_to_open":1.0,"instant_experience_clicks_to_start":1.0,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":617,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","country":"US"},"emitted_at":1696936565587} +{"stream":"ads_insights_delivery_device","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0075,"cpm":1.630435,"cpp":1.744186,"created_time":"2021-02-09","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":23.255814,"estimated_ad_recallers":20.0,"frequency":1.069767,"impressions":92,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":86,"spend":0.15,"unique_clicks":0,"updated_time":"2021-08-27","device_platform":"desktop"},"emitted_at":1696936327621} +{"stream":"ads_insights_delivery_platform","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","actions":[{"action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 
(sherif)","clicks":3,"cost_per_action_type":[{"action_type":"post_engagement","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39},{"action_type":"page_engagement","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39},{"action_type":"link_click","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39}],"cost_per_estimated_ad_recallers":0.006882,"cost_per_inline_link_click":0.39,"cost_per_inline_post_engagement":0.39,"cost_per_outbound_click":[{"action_type":"outbound_click","value":0.39}],"cost_per_unique_action_type":[{"action_type":"post_engagement","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39},{"action_type":"page_engagement","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39},{"action_type":"link_click","value":0.39,"1d_click":0.39,"7d_click":0.39,"28d_click":0.39}],"cost_per_unique_click":0.39,"cost_per_unique_inline_link_click":0.39,"cost_per_unique_outbound_click":[{"action_type":"outbound_click","value":0.39}],"cpc":0.39,"cpm":0.922713,"cpp":0.971761,"created_time":"2021-02-09","ctr":0.236593,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":14.119601,"estimated_ad_recallers":170.0,"frequency":1.053156,"impressions":1268,"inline_link_click_ctr":0.236593,"inline_link_clicks":3,"inline_post_engagement":3,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","outbound_clicks":[{"action_type":"outbound_click","value":3.0}],"outbound_clicks_ctr":[{"action_type":"outbound_click","value":0.236593}],"reach":1204,"spend":1.17,"unique_actions":[{"action_type":"page_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"post_engagement","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0},{"action_type":"link_click","value":3.0,"1d_click":3.0,"7d_click":3.0,"28d_click":3.0}],"unique_clicks":3,"unique_ctr":0.249169,"unique_inline_link_click_ctr":0.249169,"unique_inline_link_clicks":3,"unique_link_clicks_ctr":0.249169,"unique_outbound_clicks":[{"action_type":"outbound_click","value":3.0}],"unique_outbound_clicks_ctr":[{"action_type":"outbound_click","value":0.249169}],"updated_time":"2021-08-27","website_ctr":[{"action_type":"link_click","value":0.236593}],"publisher_platform":"facebook"},"emitted_at":1696936337306} +{"stream":"ads_insights_delivery_platform_and_device_platform","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846765228310398","ad_name":"Airbyte Ad","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.002,"cpm":0.392157,"cpp":0.392157,"created_time":"2021-02-09","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":19.607843,"estimated_ad_recallers":10.0,"frequency":1.0,"impressions":51,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":51,"spend":0.02,"unique_clicks":0,"updated_time":"2021-08-27","publisher_platform":"instagram","device_platform":"mobile_app"},"emitted_at":1696967644628} +{"stream":"ads_insights_demographics_age","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 
(sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0085,"cpm":1.14094,"cpp":1.188811,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":13.986014,"estimated_ad_recallers":20.0,"frequency":1.041958,"impressions":149,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":143,"spend":0.17,"unique_clicks":0,"updated_time":"2021-08-27","age":"25-34"},"emitted_at":1696936389857} +{"stream":"ads_insights_demographics_country","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":1,"cost_per_estimated_ad_recallers":0.008889,"cost_per_unique_click":0.8,"cpc":0.8,"cpm":1.255887,"cpp":1.296596,"created_time":"2021-02-11","ctr":0.156986,"date_start":"2021-02-14","date_stop":"2021-02-14","estimated_ad_recall_rate":14.58671,"estimated_ad_recallers":90.0,"frequency":1.032415,"impressions":637,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":617,"spend":0.8,"unique_clicks":1,"unique_ctr":0.162075,"updated_time":"2021-08-27","country":"US"},"emitted_at":1696936440731} +{"stream":"ads_insights_demographics_dma_region","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0,"cpm":0.0,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recallers":1.0,"frequency":1.0,"impressions":1,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":1,"spend":0.0,"unique_clicks":0,"updated_time":"2021-08-27","dma":"Anchorage"},"emitted_at":1696936491393} +{"stream":"ads_insights_demographics_gender","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0085,"cpm":1.268657,"cpp":1.338583,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-14","date_stop":"2021-02-14","estimated_ad_recall_rate":15.748032,"estimated_ad_recallers":20.0,"frequency":1.055118,"gender_targeting":"female","impressions":134,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":127,"spend":0.17,"unique_clicks":0,"updated_time":"2021-08-27","gender":"female"},"emitted_at":1696967753477} +{"stream":"ads_insights_dma","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 
(sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0,"cpm":0.0,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recallers":1.0,"frequency":1.0,"impressions":1,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":1,"spend":0.0,"unique_clicks":0,"updated_time":"2021-08-27","dma":"West Palm Beach-Ft. Pierce"},"emitted_at":1696936556045} +{"stream":"ads_insights_platform_and_device","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.0,"cpm":0.0,"cpp":0.0,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":12.5,"estimated_ad_recallers":1.0,"frequency":1.0,"impressions":8,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":8,"spend":0.0,"unique_clicks":0,"updated_time":"2021-08-27","publisher_platform":"instagram","platform_position":"feed","impression_device":"android_smartphone"},"emitted_at":1696936579028} +{"stream":"ads_insights_region","data":{"account_currency":"USD","account_id":"212551616838260","account_name":"Airbyte","ad_id":"23846784938030398","ad_name":"Stock photo ad 2","adset_id":"23846765228280398","adset_name":"Vanilla awareness ad set","buying_type":"AUCTION","campaign_id":"23846765228240398","campaign_name":"Airbyte Awareness Campaign 1 (sherif)","clicks":0,"cost_per_estimated_ad_recallers":0.02,"cpm":1.111111,"cpp":1.111111,"created_time":"2021-02-11","ctr":0.0,"date_start":"2021-02-15","date_stop":"2021-02-15","estimated_ad_recall_rate":5.555556,"estimated_ad_recallers":1.0,"frequency":1.0,"impressions":18,"instant_experience_clicks_to_open":1.0,"instant_experience_clicks_to_start":1.0,"objective":"BRAND_AWARENESS","optimization_goal":"AD_RECALL_LIFT","reach":18,"spend":0.02,"unique_clicks":0,"updated_time":"2021-08-27","region":"New York"},"emitted_at":1696936621899} +{"stream":"customcustom_insight_stream","data":{"account_id":"212551616838260","cpc":0.27,"ad_id":"23846765228310398","clicks":1,"account_name":"Airbyte","date_start":"2021-02-15","date_stop":"2021-02-15","gender":"female"},"emitted_at":1695385890508} diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/future_state.json b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/future_state.json index 1cc425ce9f5d..55d23aacd448 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/future_state.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/future_state.json @@ -4,7 +4,7 @@ "stream": { "stream_state": { "event_time": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "activities" @@ -16,7 +16,7 @@ "stream": { "stream_state": { "updated_time": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": ["ARCHIVED"] }, "stream_descriptor": { "name": "campaigns" @@ -28,7 +28,7 @@ "stream": { "stream_state": { "updated_time": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "images" @@ -40,7 +40,7 @@ "stream": { "stream_state": { "updated_time": "2121-07-25T13:34:26Z", - 
"include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "videos" @@ -52,7 +52,7 @@ "stream": { "stream_state": { "updated_time": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ad_creatives" @@ -64,7 +64,7 @@ "stream": { "stream_state": { "updated_time": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": ["ARCHIVED"] }, "stream_descriptor": { "name": "ad_sets" @@ -75,7 +75,11 @@ "type": "STREAM", "stream": { "stream_state": { - "updated_time": "2121-07-25T13:34:26Z", + "212551616838260": { + "updated_time": "2121-07-25T13:34:26Z", + "filter_statuses": ["ARCHIVED"], + "include_deleted": true + }, "include_deleted": true }, "stream_descriptor": { @@ -88,7 +92,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights" @@ -100,7 +104,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_age_and_gender" @@ -112,7 +116,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_country" @@ -124,7 +128,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_dma" @@ -136,7 +140,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_platform_and_device" @@ -148,7 +152,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_region" @@ -160,7 +164,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_action_type" @@ -172,20 +176,19 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "custommy_custom_insights" } } }, - { "type": "STREAM", "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_action_carousel_card" @@ -197,7 +200,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_action_conversion_device" @@ -209,7 +212,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_action_product_id" @@ -221,7 +224,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_action_reaction" @@ -233,7 +236,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_action_video_sound" @@ -245,7 +248,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": 
"ads_insights_action_video_type" @@ -257,7 +260,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_delivery_device" @@ -269,7 +272,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_delivery_platform" @@ -281,7 +284,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_delivery_platform_and_device_platform" @@ -293,7 +296,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_demographics_age" @@ -305,7 +308,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_demographics_country" @@ -317,7 +320,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_demographics_dma_region" @@ -329,7 +332,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "ads_insights_demographics_gender" @@ -341,7 +344,7 @@ "stream": { "stream_state": { "date_start": "2121-07-25T13:34:26Z", - "include_deleted": true + "filter_statuses": [] }, "stream_descriptor": { "name": "customcustom_insight_stream" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/invalid_config.json index 44d42108c4db..4774cfe38c36 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/invalid_config.json @@ -1,6 +1,5 @@ { "start_date": "2023-04-01T00:00:00Z", "account_id": "account", - "access_token": "wrong_token", - "include_deleted": true + "access_token": "wrong_token" } diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json index 1657aaeda2d0..4649cf17fdfe 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json @@ -44,24 +44,81 @@ "type": "string", "format": "date-time" }, - "include_deleted": { - "title": "Include Deleted Campaigns, Ads, and AdSets", - "description": "Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.", - "default": false, + "campaign_statuses": { + "title": "Campaign Statuses", + "description": "Select the statuses you want to be loaded in the stream. 
If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.", "default": [], "order": 4, - "type": "boolean" + "type": "array", + "items": { + "title": "ValidCampaignStatuses", + "description": "An enumeration.", + "enum": [ + "ACTIVE", + "ARCHIVED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES" + ] + } + }, + "adset_statuses": { + "title": "AdSet Statuses", + "description": "Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.", + "default": [], + "order": 5, + "type": "array", + "items": { + "title": "ValidAdSetStatuses", + "description": "An enumeration.", + "enum": [ + "ACTIVE", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES" + ] + } + }, + "ad_statuses": { + "title": "Ad Statuses", + "description": "Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.", + "default": [], + "order": 6, + "type": "array", + "items": { + "title": "ValidAdStatuses", + "description": "An enumeration.", + "enum": [ + "ACTIVE", + "ADSET_PAUSED", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "DISAPPROVED", + "IN_PROCESS", + "PAUSED", + "PENDING_BILLING_INFO", + "PENDING_REVIEW", + "PREAPPROVED", + "WITH_ISSUES" + ] + } }, "fetch_thumbnail_images": { "title": "Fetch Thumbnail Images from Ad Creative", "description": "Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.", "default": false, - "order": 5, + "order": 7, "type": "boolean" }, "custom_insights": { "title": "Custom Insights", "description": "A list which contains ad statistics entries, each entry must have a name and can contain fields, breakdowns or action_breakdowns. Click on \"add\" to fill this field.", - "order": 6, + "order": 8, "type": "array", "items": { "title": "InsightConfig", @@ -347,7 +404,7 @@ "title": "Page Size of Requests", "description": "Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.", "default": 100, - "order": 7, + "order": 10, "exclusiveMinimum": 0, "type": "integer" }, @@ -355,7 +412,7 @@ "title": "Insights Lookback Window", "description": "The attribution window. Facebook freezes insight data 28 days after it was generated, which means that all data from the past 28 days may have changed since we last emitted it, so you can retrieve refreshed insights from the past by setting this parameter. If you set a custom lookback window value in Facebook account, please provide the same value here.", "default": 28, - "order": 8, + "order": 11, "maximum": 28, "minimum": 1, "exclusiveMinimum": 0, "type": "integer" }, "insights_job_timeout": { "title": "Insights Job Timeout", "description": "Insights Job Timeout establishes the maximum amount of time (in minutes) of waiting for the report job to complete. When the timeout is reached, the job is considered failed and we try to request a smaller amount of data by breaking the job into a few smaller ones. 
If you know in advance that 60 minutes is not enough for your report to be processed, you can decrease the timeout value so that we start breaking the job into smaller parts sooner.", "default": 60, - "order": 9, + "order": 12, "maximum": 60, "minimum": 10, "exclusiveMinimum": 0, diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py index 38c08f4e4139..30d7784bb579 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/test_streams.py @@ -31,54 +31,129 @@ def configured_catalog_fixture(config) -> ConfiguredAirbyteCatalog: streams = [] # Prefer incremental if available for stream in catalog.streams: - sync_mode = SyncMode.incremental if SyncMode.incremental in stream.supported_sync_modes else SyncMode.full_refresh - streams.append(ConfiguredAirbyteStream(stream=stream, sync_mode=sync_mode, destination_sync_mode=DestinationSyncMode.append)) + sync_mode = ( + SyncMode.incremental + if SyncMode.incremental in stream.supported_sync_modes + else SyncMode.full_refresh + ) + streams.append( + ConfiguredAirbyteStream( + stream=stream, + sync_mode=sync_mode, + destination_sync_mode=DestinationSyncMode.append, + ) + ) return ConfiguredAirbyteCatalog(streams=streams) class TestFacebookMarketingSource: @pytest.mark.parametrize( - "stream_name, deleted_id", [("ads", "23846756820320398"), ("campaigns", "23846541919710398"), ("ad_sets", "23846541706990398")] + "stream_name, deleted_id", + [ + ("ads", "23846756820320398"), + ("campaigns", "23846541919710398"), + ("ad_sets", "23846541706990398"), + ], ) - def test_streams_with_include_deleted(self, stream_name, deleted_id, config_with_include_deleted, configured_catalog): + def test_streams_with_include_deleted( + self, stream_name, deleted_id, config_with_include_deleted, configured_catalog ): catalog = self._slice_catalog(configured_catalog, {stream_name}) records, states = self._read_records(config_with_include_deleted, catalog) deleted_records = list(filter(self._deleted_record, records)) is_specific_deleted_pulled = deleted_id in list(map(self._object_id, records)) + account_id = config_with_include_deleted["account_id"] assert states, "incremental read should produce states" for name, state in states[-1].state.data.items(): - assert "include_deleted" in state, f"State for {name} should include `include_deleted` flag" + assert ( + "filter_statuses" in state[account_id] + ), f"State for {name} should include `filter_statuses` flag" - assert deleted_records, f"{stream_name} stream should have deleted records returned" - assert is_specific_deleted_pulled, f"{stream_name} stream should have a deleted record with id={deleted_id}" + assert ( + deleted_records + ), f"{stream_name} stream should have deleted records returned" + assert ( + is_specific_deleted_pulled + ), f"{stream_name} stream should have a deleted record with id={deleted_id}" @pytest.mark.parametrize( - "stream_name, deleted_num, include_deleted_in_state", + "stream_name, deleted_num, filter_statuses", [ ("ads", 2, False), ("campaigns", 3, False), ("ad_sets", 1, False), - ("ads", 0, True), - ("campaigns", 0, True), - ("ad_sets", 0, True), + ( + "ads", + 0, + [ + "ACTIVE", + "ADSET_PAUSED", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "DISAPPROVED", + "IN_PROCESS", + "PAUSED", + "PENDING_BILLING_INFO", + "PENDING_REVIEW", 
+ "PREAPPROVED", + "WITH_ISSUES", + ], + ), + ( + "campaigns", + 0, + [ + "ACTIVE", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES", + ], + ), + ( + "ad_sets", + 0, + [ + "ACTIVE", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES", + ], + ), ], ) def test_streams_with_include_deleted_and_state( - self, stream_name, deleted_num, include_deleted_in_state, config_with_include_deleted, configured_catalog, state + self, + stream_name, + deleted_num, + filter_statuses, + config_with_include_deleted, + configured_catalog, + state, ): - """Should ignore state because of include_deleted enabled""" - if include_deleted_in_state: + """Should ignore state because of filter_statuses changed""" + if filter_statuses: state = copy.deepcopy(state) for value in state.values(): - value["include_deleted"] = True + value["filter_statuses"] = filter_statuses catalog = self._slice_catalog(configured_catalog, {stream_name}) - records, states = self._read_records(config_with_include_deleted, catalog, state=state) + records, states = self._read_records( + config_with_include_deleted, catalog, state=state + ) deleted_records = list(filter(self._deleted_record, records)) - assert len(deleted_records) == deleted_num, f"{stream_name} should have {deleted_num} deleted records returned" + assert ( + len(deleted_records) == deleted_num + ), f"{stream_name} should have {deleted_num} deleted records returned" @staticmethod def _deleted_record(record: AirbyteMessage) -> bool: @@ -89,7 +164,9 @@ def _object_id(record: AirbyteMessage) -> str: return str(record.record.data["id"]) @staticmethod - def _slice_catalog(catalog: ConfiguredAirbyteCatalog, streams: Set[str]) -> ConfiguredAirbyteCatalog: + def _slice_catalog( + catalog: ConfiguredAirbyteCatalog, streams: Set[str] + ) -> ConfiguredAirbyteCatalog: sliced_catalog = ConfiguredAirbyteCatalog(streams=[]) for stream in catalog.streams: if stream.stream.name in streams: @@ -97,10 +174,14 @@ def _slice_catalog(catalog: ConfiguredAirbyteCatalog, streams: Set[str]) -> Conf return sliced_catalog @staticmethod - def _read_records(conf, catalog, state=None) -> Tuple[List[AirbyteMessage], List[AirbyteMessage]]: + def _read_records( + conf, catalog, state=None + ) -> Tuple[List[AirbyteMessage], List[AirbyteMessage]]: records = [] states = [] - for message in SourceFacebookMarketing().read(logging.getLogger("airbyte"), conf, catalog, state=state): + for message in SourceFacebookMarketing().read( + logging.getLogger("airbyte"), conf, catalog, state=state + ): if message.type == Type.RECORD: records.append(message) elif message.type == Type.STATE: diff --git a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml index ae15d5dd7b85..2c989286b785 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c - dockerImageTag: 1.3.3 + dockerImageTag: 1.4.1 dockerRepository: airbyte/source-facebook-marketing documentationUrl: https://docs.airbyte.com/integrations/sources/facebook-marketing githubIssueLabel: source-facebook-marketing diff --git a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml index 
5a37a64ce856..7d7bd3063ddf 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml +++ b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "1.3.3" +version = "1.4.1" name = "source-facebook-marketing" description = "Source implementation for Facebook Marketing." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/api.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/api.py index edfd6195602e..31bf4644013d 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/api.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/api.py @@ -108,7 +108,10 @@ def _get_max_usage_pause_interval_from_batch(self, records): if "headers" not in record: continue headers = {header["name"].lower(): header["value"] for header in record["headers"]} - usage_from_response, pause_interval_from_response = self._parse_call_rate_header(headers) + ( + usage_from_response, + pause_interval_from_response, + ) = self._parse_call_rate_header(headers) usage = max(usage, usage_from_response) pause_interval = max(pause_interval_from_response, pause_interval) return usage, pause_interval diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/config_migrations.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/config_migrations.py index c8b6c7e109a2..f63b98ebd5b4 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/config_migrations.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/config_migrations.py @@ -10,6 +10,7 @@ from airbyte_cdk.entrypoint import AirbyteEntrypoint from airbyte_cdk.sources import Source from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository +from source_facebook_marketing.spec import ValidAdSetStatuses, ValidAdStatuses, ValidCampaignStatuses logger = logging.getLogger("airbyte_logger") @@ -80,3 +81,43 @@ def migrate(cls, args: List[str], source: Source) -> None: cls.emit_control_message( cls.modify_and_save(config_path, source, config), ) + + +class MigrateIncludeDeletedToStatusFilters(MigrateAccountIdToArray): + """ + This class migrates the config at runtime. + This migration is backwards compatible with the previous version, as the new properties will simply be created. + When falling back to the previous source version, the connector will use the old property `include_deleted`. + + Starting from `1.4.0`, the `include_deleted` property is replaced with the `ad_statuses`, + `adset_statuses` and `campaign_statuses` properties, which represent status filters. + """ + + migrate_from_key: str = "include_deleted" + migrate_to_key: str = "ad_statuses" + stream_filter_to_statuses: Mapping[str, List[str]] = { + "ad_statuses": [status.value for status in ValidAdStatuses], + "adset_statuses": [status.value for status in ValidAdSetStatuses], + "campaign_statuses": [status.value for status in ValidCampaignStatuses], + } + + @classmethod + def should_migrate(cls, config: Mapping[str, Any]) -> bool: + """ + This method determines whether the config should be migrated to have the new properties for the status filters. + Returns: + > True, if the transformation is necessary + > False, otherwise. 
+ > Raises the Exception if the structure could not be migrated. + """ + config_is_updated = config.get(cls.migrate_to_key) + no_include_deleted = not config.get(cls.migrate_from_key) + return False if config_is_updated or no_include_deleted else True + + @classmethod + def transform(cls, config: Mapping[str, Any]) -> Mapping[str, Any]: + # transform the config + for stream_filter, statuses in cls.stream_filter_to_statuses.items(): + config[stream_filter] = statuses + # return transformed config + return config diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/source.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/source.py index 18cc0840526a..0e4744be48f2 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/source.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/source.py @@ -149,7 +149,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Type[Stream]]: account_ids=config.account_ids, start_date=config.start_date, end_date=config.end_date, - include_deleted=config.include_deleted, + filter_statuses=config.adset_statuses, page_size=config.page_size, ), Ads( @@ -157,7 +157,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Type[Stream]]: account_ids=config.account_ids, start_date=config.start_date, end_date=config.end_date, - include_deleted=config.include_deleted, + filter_statuses=config.ad_statuses, page_size=config.page_size, ), AdCreatives( @@ -191,19 +191,17 @@ def streams(self, config: Mapping[str, Any]) -> List[Type[Stream]]: account_ids=config.account_ids, start_date=config.start_date, end_date=config.end_date, - include_deleted=config.include_deleted, + filter_statuses=config.campaign_statuses, page_size=config.page_size, ), CustomConversions( api=api, account_ids=config.account_ids, - include_deleted=config.include_deleted, page_size=config.page_size, ), CustomAudiences( api=api, account_ids=config.account_ids, - include_deleted=config.include_deleted, page_size=config.page_size, ), Images( @@ -211,7 +209,6 @@ def streams(self, config: Mapping[str, Any]) -> List[Type[Stream]]: account_ids=config.account_ids, start_date=config.start_date, end_date=config.end_date, - include_deleted=config.include_deleted, page_size=config.page_size, ), Videos( @@ -219,7 +216,6 @@ def streams(self, config: Mapping[str, Any]) -> List[Type[Stream]]: account_ids=config.account_ids, start_date=config.start_date, end_date=config.end_date, - include_deleted=config.include_deleted, page_size=config.page_size, ), Activities( @@ -227,7 +223,6 @@ def streams(self, config: Mapping[str, Any]) -> List[Type[Stream]]: account_ids=config.account_ids, start_date=config.start_date, end_date=config.end_date, - include_deleted=config.include_deleted, page_size=config.page_size, ), ] @@ -259,14 +254,23 @@ def spec(self, *args, **kwargs) -> ConnectorSpecification: }, complete_oauth_server_input_specification={ "type": "object", - "properties": {"client_id": {"type": "string"}, "client_secret": {"type": "string"}}, + "properties": { + "client_id": {"type": "string"}, + "client_secret": {"type": "string"}, + }, }, complete_oauth_server_output_specification={ "type": "object", "additionalProperties": True, "properties": { - "client_id": {"type": "string", "path_in_connector_config": ["client_id"]}, - "client_secret": {"type": "string", "path_in_connector_config": ["client_secret"]}, + "client_id": { + "type": "string", + 
"path_in_connector_config": ["client_id"], + }, + "client_secret": { + "type": "string", + "path_in_connector_config": ["client_secret"], + }, }, }, ), diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/spec.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/spec.py index 951ce0a2a63c..4b1b7a8a51e4 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/spec.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/spec.py @@ -8,7 +8,10 @@ from typing import List, Optional, Set from airbyte_cdk.sources.config import BaseConfig +from facebook_business.adobjects.ad import Ad +from facebook_business.adobjects.adset import AdSet from facebook_business.adobjects.adsinsights import AdsInsights +from facebook_business.adobjects.campaign import Campaign from pydantic import BaseModel, Field, PositiveInt, constr logger = logging.getLogger("airbyte") @@ -17,6 +20,9 @@ ValidFields = Enum("ValidEnums", AdsInsights.Field.__dict__) ValidBreakdowns = Enum("ValidBreakdowns", AdsInsights.Breakdowns.__dict__) ValidActionBreakdowns = Enum("ValidActionBreakdowns", AdsInsights.ActionBreakdowns.__dict__) +ValidCampaignStatuses = Enum("ValidCampaignStatuses", Campaign.EffectiveStatus.__dict__) +ValidAdSetStatuses = Enum("ValidAdSetStatuses", AdSet.EffectiveStatus.__dict__) +ValidAdStatuses = Enum("ValidAdStatuses", Ad.EffectiveStatus.__dict__) DATE_TIME_PATTERN = "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" EMPTY_PATTERN = "^$" @@ -32,7 +38,12 @@ class Config: description="The name value of insight", ) - level: str = Field(title="Level", description="Chosen level for API", default="ad", enum=["ad", "adset", "campaign", "account"]) + level: str = Field( + title="Level", + description="Chosen level for API", + default="ad", + enum=["ad", "adset", "campaign", "account"], + ) fields: Optional[List[ValidFields]] = Field( title="Fields", @@ -111,6 +122,7 @@ class ConnectorConfig(BaseConfig): class Config: title = "Source Facebook Marketing" + use_enum_values = True account_ids: Set[constr(regex="^[0-9]+$")] = Field( title="Ad Account ID(s)", @@ -162,23 +174,37 @@ class Config: default_factory=lambda: datetime.now(tz=timezone.utc), ) - include_deleted: bool = Field( - title="Include Deleted Campaigns, Ads, and AdSets", + campaign_statuses: Optional[List[ValidCampaignStatuses]] = Field( + title="Campaign Statuses", order=4, - default=False, - description="Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.", + description="Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.", + default=[], + ) + + adset_statuses: Optional[List[ValidAdSetStatuses]] = Field( + title="AdSet Statuses", + order=5, + description="Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.", + default=[], + ) + + ad_statuses: Optional[List[ValidAdStatuses]] = Field( + title="Ad Statuses", + order=6, + description="Select the statuses you want to be loaded in the stream. 
If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.", + default=[], ) fetch_thumbnail_images: bool = Field( title="Fetch Thumbnail Images from Ad Creative", - order=5, + order=7, default=False, description="Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.", ) custom_insights: Optional[List[InsightConfig]] = Field( title="Custom Insights", - order=6, + order=8, description=( "A list which contains ad statistics entries, each entry must have a name and can contain fields, " 'breakdowns or action_breakdowns. Click on "add" to fill this field.' @@ -187,7 +213,7 @@ page_size: Optional[PositiveInt] = Field( title="Page Size of Requests", - order=7, + order=10, default=100, description=( "Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. " @@ -197,7 +223,7 @@ insights_lookback_window: Optional[PositiveInt] = Field( title="Insights Lookback Window", - order=8, + order=11, description=( "The attribution window. Facebook freezes insight data 28 days after it was generated, " "which means that all data from the past 28 days may have changed since we last emitted it, " @@ -211,7 +237,7 @@ insights_job_timeout: Optional[PositiveInt] = Field( title="Insights Job Timeout", - order=9, + order=12, description=( "Insights Job Timeout establishes the maximum amount of time (in minutes) of waiting for the report job to complete. " "When the timeout is reached, the job is considered failed and we try to request a smaller amount of data by breaking the job into a few smaller ones. " diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job.py index 317be6673410..e8f1038c2bb9 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job.py @@ -192,7 +192,13 @@ class InsightAsyncJob(AsyncJob): page_size = 100 - def __init__(self, edge_object: Union[AdAccount, Campaign, AdSet, Ad], params: Mapping[str, Any], job_timeout: Duration, **kwargs): + def __init__( + self, + edge_object: Union[AdAccount, Campaign, AdSet, Ad], + params: Mapping[str, Any], + job_timeout: Duration, + **kwargs, + ): """Initialize :param api: FB API @@ -254,7 +260,11 @@ def _split_by_edge_class(self, edge_class: Union[Type[Campaign], Type[AdSet], Ty jobs = [ InsightAsyncJob( - api=self._api, edge_object=edge_class(pk), params=self._params, interval=self._interval, job_timeout=self._job_timeout + api=self._api, + edge_object=edge_class(pk), + params=self._params, + interval=self._interval, + job_timeout=self._job_timeout, ) for pk in ids ] @@ -327,7 +337,11 @@ def update_job(self, batch: Optional[FacebookAdsApiBatch] = None): return if batch is not None: - self._job.api_get(batch=batch, success=self._batch_success_handler, failure=self._batch_failure_handler) + self._job.api_get( + batch=batch, + success=self._batch_success_handler, + failure=self._batch_failure_handler, + ) else: self._job = self._job.api_get() self._check_status() diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job_manager.py 
b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job_manager.py index f486fb314784..dc01cd228412 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job_manager.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/async_job_manager.py @@ -104,7 +104,10 @@ def _check_jobs_status_and_restart(self) -> List[AsyncJob]: if job.attempt_number >= self.MAX_NUMBER_OF_ATTEMPTS: raise JobException(f"{job}: failed more than {self.MAX_NUMBER_OF_ATTEMPTS} times. Terminating...") elif job.attempt_number == 2: - logger.info("%s: failed second time, trying to split job into smaller jobs.", job) + logger.info( + "%s: failed second time, trying to split job into smaller jobs.", + job, + ) smaller_jobs = job.split_job() grouped_jobs = ParentAsyncJob(api=self._api.api, jobs=smaller_jobs, interval=job.interval) running_jobs.append(grouped_jobs) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py index 169ef7875405..af36033599c3 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_insight_streams.py @@ -25,7 +25,6 @@ class AdsInsights(FBMarketingIncrementalStream): """doc: https://developers.facebook.com/docs/marketing-api/insights""" cursor_field = "date_start" - enable_deleted = False ALL_ACTION_ATTRIBUTION_WINDOWS = [ "1d_click", @@ -175,7 +174,7 @@ def state(self, value: Mapping[str, Any]): # if the time increment configured for this stream is different from the one in the previous state # then the previous state object is invalid, and we should start replicating data from scratch # to achieve this, we skip setting the state - transformed_state = self._transform_state_from_old_format(value, ["time_increment"]) + transformed_state = self._transform_state_from_one_account_format(value, ["time_increment"]) if transformed_state.get("time_increment", 1) != self.time_increment: logger.info(f"Ignoring bookmark for {self.name} because of different `time_increment` option.") return @@ -193,7 +192,11 @@ def state(self, value: Mapping[str, Any]): self._next_cursor_values = self._get_start_date() - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): + def get_updated_state( + self, + current_stream_state: MutableMapping[str, Any], + latest_record: Mapping[str, Any], + ): """Update stream state from latest record :param current_stream_state: latest state returned @@ -260,7 +263,10 @@ def _response_data_is_valid(self, data: Iterable[Mapping[str, Any]]) -> bool: return all([breakdown in data for breakdown in self.breakdowns]) def stream_slices( - self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_state: Mapping[str, Any] = None, ) -> Iterable[Optional[Mapping[str, Any]]]: """Slice by date periods and schedule async job for each period, run at most MAX_ASYNC_JOBS jobs at the same time. 
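For illustration (hypothetical numbers): with the default daily time_increment, a three-day window would be split into three date slices, each scheduling its own async insights job, with at most MAX_ASYNC_JOBS of them in flight at once.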
This solution for Async was chosen because: diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_streams.py index 9f396077df8a..895456feb9c5 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/base_streams.py @@ -21,7 +21,6 @@ if TYPE_CHECKING: # pragma: no cover from source_facebook_marketing.api import API - logger = logging.getLogger("airbyte") @@ -31,9 +30,9 @@ class FBMarketingStream(Stream, ABC): primary_key = "id" transformer: TypeTransformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) - # this flag will override `include_deleted` option for streams that does not support it - enable_deleted = True - # entity prefix for `include_deleted` filter, it usually matches singular version of stream name + valid_statuses = [] + status_field = "" + # entity prefix for statuses filter, it usually matches singular version of stream name entity_prefix = None # In case of Error 'Too much data was requested in batch' some fields should be removed from request fields_exceptions = [] @@ -42,12 +41,19 @@ class FBMarketingStream(Stream, ABC): def availability_strategy(self) -> Optional["AvailabilityStrategy"]: return None - def __init__(self, api: "API", account_ids: List[str], include_deleted: bool = False, page_size: int = 100, **kwargs): + def __init__( + self, + api: "API", + account_ids: List[str], + filter_statuses: list = [], + page_size: int = 100, + **kwargs, + ): super().__init__(**kwargs) self._api = api self._account_ids = account_ids self.page_size = page_size if page_size is not None else 100 - self._include_deleted = include_deleted if self.enable_deleted else False + self._filter_statuses = filter_statuses self._fields = None def fields(self, **kwargs) -> List[str]: @@ -110,7 +116,7 @@ def get_account_state(self, account_id: str, stream_state: Mapping[str, Any] = N else: return {} - def _transform_state_from_old_format(self, state: Mapping[str, Any], move_fields: List[str] = None) -> Mapping[str, Any]: + def _transform_state_from_one_account_format(self, state: Mapping[str, Any], move_fields: List[str] = None) -> Mapping[str, Any]: """ Transforms the state from an old format to a new format based on account IDs. @@ -145,6 +151,19 @@ def _transform_state_from_old_format(self, state: Mapping[str, Any], move_fields # If the state is empty or there are multiple account IDs, return an empty dictionary. 
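+        # Illustration (hypothetical values): with a single configured account "123", a legacy
+        # state such as {"updated_time": "2021-01-01T00:00:00Z"} is expected to be re-keyed as
+        # {"123": {"updated_time": "2021-01-01T00:00:00Z"}}; with several account IDs the legacy
+        # state cannot be attributed to any one account, hence the empty dictionary below.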
return {} + def _transform_state_from_old_deleted_format(self, state: Mapping[str, Any]): + # transform from the old format with `include_deleted` + for account_id in self._account_ids: + account_state = state.get(account_id, {}) + # check if the state for this account id is in the old format + if "filter_statuses" not in account_state and "include_deleted" in account_state: + if account_state["include_deleted"]: + account_state["filter_statuses"] = self.valid_statuses + else: + account_state["filter_statuses"] = [] + state[account_id] = account_state + return state + def read_records( self, sync_mode: SyncMode, @@ -157,7 +176,10 @@ def read_records( account_state = stream_slice.get("stream_state", {}) try: - for record in self.list_objects(params=self.request_params(stream_state=account_state), account_id=account_id): + for record in self.list_objects( + params=self.request_params(stream_state=account_state), + account_id=account_id, + ): if isinstance(record, AbstractObject): record = record.export_all_data() # convert FB object to dict self.fix_date_time(record) @@ -167,6 +189,10 @@ def read_records( raise traced_exception(exc) def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: + if stream_state: + stream_state = self._transform_state_from_one_account_format(stream_state, ["include_deleted"]) + stream_state = self._transform_state_from_old_deleted_format(stream_state) + for account_id in self._account_ids: account_state = self.get_account_state(account_id, stream_state) yield {"account_id": account_id, "stream_state": account_state} @@ -182,36 +208,26 @@ def list_objects(self, params: Mapping[str, Any]) -> Iterable: def request_params(self, **kwargs) -> MutableMapping[str, Any]: """Parameters that should be passed to query_records method""" params = {"limit": self.page_size} - - if self._include_deleted: - params.update(self._filter_all_statuses()) + params.update(self._filter_all_statuses()) return params def _filter_all_statuses(self) -> MutableMapping[str, Any]: - """Filter that covers all possible statuses thus including deleted/archived records""" - filt_values = [ - "active", - "archived", - "completed", - "limited", - "not_delivering", - "deleted", - "not_published", - "pending_review", - "permanently_deleted", - "recently_completed", - "recently_rejected", - "rejected", - "scheduled", - "inactive", - ] - - return { - "filtering": [ - {"field": f"{self.entity_prefix}.delivery_info", "operator": "IN", "value": filt_values}, - ], - } + """Filter records by statuses""" + + return ( + { + "filtering": [ + { + "field": f"{self.entity_prefix}.{self.status_field}", + "operator": "IN", + "value": self._filter_statuses, + }, + ], + } + if self._filter_statuses and self.status_field + else {} + ) class FBMarketingIncrementalStream(FBMarketingStream, ABC): @@ -224,13 +240,20 @@ def __init__(self, start_date: Optional[datetime], end_date: Optional[datetime], self._start_date = pendulum.instance(start_date) if start_date else None self._end_date = pendulum.instance(end_date) if end_date else None - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): + def get_updated_state( + self, + current_stream_state: MutableMapping[str, Any], + latest_record: Mapping[str, Any], + ): """Update stream state from latest record""" account_id = latest_record["account_id"] - state_for_accounts = self._transform_state_from_old_format(current_stream_state, ["include_deleted"]) + 
state_for_accounts = self._transform_state_from_one_account_format(current_stream_state, ["include_deleted"]) + state_for_accounts = self._transform_state_from_old_deleted_format(state_for_accounts) account_state = self.get_account_state(account_id, state_for_accounts) - potentially_new_records_in_the_past = self._include_deleted and not account_state.get("include_deleted", False) + potentially_new_records_in_the_past = self._filter_statuses and ( + set(self._filter_statuses) - set(account_state.get("filter_statuses", [])) + ) record_value = latest_record[self.cursor_field] state_value = account_state.get(self.cursor_field) or record_value max_cursor = max(pendulum.parse(state_value), pendulum.parse(record_value)) @@ -238,8 +261,8 @@ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], late max_cursor = record_value state_for_accounts.setdefault(account_id, {})[self.cursor_field] = str(max_cursor) + state_for_accounts[account_id]["filter_statuses"] = self._filter_statuses - state_for_accounts["include_deleted"] = self._include_deleted return state_for_accounts def request_params(self, stream_state: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: @@ -260,9 +283,10 @@ def _state_filter(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]: # if start_date is not specified then do not use date filters return {} - potentially_new_records_in_the_past = self._include_deleted and not stream_state.get("include_deleted", False) + potentially_new_records_in_the_past = set(self._filter_statuses) - set(stream_state.get("filter_statuses", [])) + if potentially_new_records_in_the_past: - self.logger.info(f"Ignoring bookmark for {self.name} because of enabled `include_deleted` option") + self.logger.info(f"Ignoring bookmark for {self.name} because `filter_statuses` were changed.") if self._start_date: filter_value = self._start_date else: @@ -283,8 +307,6 @@ def _state_filter(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]: class FBMarketingReversedIncrementalStream(FBMarketingIncrementalStream, ABC): """The base class for streams that don't support filtering and return records sorted desc by cursor_value""" - enable_deleted = False # API don't have any filtering, so implement include_deleted in code - def __init__(self, **kwargs): super().__init__(**kwargs) self._cursor_values = {} @@ -294,7 +316,7 @@ def state(self) -> Mapping[str, Any]: """State getter, get current state and serialize it to emmit Airbyte STATE message""" if self._cursor_values: result_state = {account_id: {self.cursor_field: cursor_value} for account_id, cursor_value in self._cursor_values.items()} - result_state["include_deleted"] = self._include_deleted + result_state["filter_statuses"] = self._filter_statuses return result_state return {} @@ -302,9 +324,11 @@ def state(self) -> Mapping[str, Any]: @state.setter def state(self, value: Mapping[str, Any]): """State setter, ignore state if current settings mismatch saved state""" - transformed_state = self._transform_state_from_old_format(value, ["include_deleted"]) - if self._include_deleted and not transformed_state.get("include_deleted"): - logger.info(f"Ignoring bookmark for {self.name} because of enabled `include_deleted` option") + transformed_state = self._transform_state_from_one_account_format(value, ["include_deleted"]) + transformed_state = self._transform_state_from_old_deleted_format(transformed_state) + + if set(self._filter_statuses) - set(transformed_state.get("filter_statuses", [])): + logger.info(f"Ignoring bookmark 
for {self.name} because `filter_statuses` were changed.") return self._cursor_values = {} @@ -317,9 +341,6 @@ def _state_filter(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]: """Don't have classic cursor filtering""" return {} - def get_record_deleted_status(self, record) -> bool: - return False - def read_records( self, sync_mode: SyncMode, @@ -337,7 +358,10 @@ account_state = stream_slice.get("stream_state") try: - records_iter = self.list_objects(params=self.request_params(stream_state=account_state), account_id=account_id) + records_iter = self.list_objects( + params=self.request_params(stream_state=account_state), + account_id=account_id, + ) account_cursor = self._cursor_values.get(account_id) max_cursor_value = None @@ -345,8 +369,6 @@ record_cursor_value = pendulum.parse(record[self.cursor_field]) if account_cursor and record_cursor_value < account_cursor: break - if not self._include_deleted and self.get_record_deleted_status(record): - continue max_cursor_value = max(max_cursor_value, record_cursor_value) if max_cursor_value else record_cursor_value record = record.export_all_data() diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/common.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/common.py index a5776d8f7c32..b6e20f2c60a7 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/common.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/common.py @@ -15,7 +15,20 @@ # The Facebook API error codes indicating rate-limiting are listed at # https://developers.facebook.com/docs/graph-api/overview/rate-limiting/ -FACEBOOK_RATE_LIMIT_ERROR_CODES = (4, 17, 32, 613, 80000, 80001, 80002, 80003, 80004, 80005, 80006, 80008) +FACEBOOK_RATE_LIMIT_ERROR_CODES = ( + 4, + 17, + 32, + 613, + 80000, + 80001, + 80002, + 80003, + 80004, + 80005, + 80006, + 80008, +) FACEBOOK_TEMPORARY_OAUTH_ERROR_CODE = 2 FACEBOOK_BATCH_ERROR_CODE = 960 FACEBOOK_UNKNOWN_ERROR_CODE = 99 @@ -158,4 +171,9 @@ def traced_exception(fb_exception: FacebookRequestError): failure_type = FailureType.system_error friendly_msg = f"Error: {fb_exception.api_error_code()}, {fb_exception.api_error_message()}." 
- return AirbyteTracedException(message=friendly_msg or msg, internal_message=msg, failure_type=failure_type, exception=fb_exception) + return AirbyteTracedException( + message=friendly_msg or msg, + internal_message=msg, + failure_type=failure_type, + exception=fb_exception, + ) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/streams.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/streams.py index fec7d321a1ed..d33e202a637b 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/streams/streams.py @@ -13,6 +13,7 @@ from facebook_business.adobjects.adimage import AdImage from facebook_business.adobjects.user import User from facebook_business.exceptions import FacebookRequestError +from source_facebook_marketing.spec import ValidAdSetStatuses, ValidAdStatuses, ValidCampaignStatuses from .base_insight_streams import AdsInsights from .base_streams import FBMarketingIncrementalStream, FBMarketingReversedIncrementalStream, FBMarketingStream @@ -41,7 +42,6 @@ class AdCreatives(FBMarketingStream): """ entity_prefix = "adcreative" - enable_deleted = False def __init__(self, fetch_thumbnail_images: bool = False, **kwargs): super().__init__(**kwargs) @@ -78,7 +78,6 @@ class CustomConversions(FBMarketingStream): """doc: https://developers.facebook.com/docs/marketing-api/reference/custom-conversion""" entity_prefix = "customconversion" - enable_deleted = False def list_objects(self, params: Mapping[str, Any], account_id: str) -> Iterable: return self._api.get_account(account_id=account_id).get_custom_conversions(params=params, fields=self.fields()) @@ -88,7 +87,6 @@ class CustomAudiences(FBMarketingStream): """doc: https://developers.facebook.com/docs/marketing-api/reference/custom-audience""" entity_prefix = "customaudience" - enable_deleted = False # The `rule` field is excluded from the list because it caused the error message "Please reduce the amount of data" for certain connections. 
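+    # Per the base stream's note on fields_exceptions, fields listed here are meant to be
+    # dropped from the request when Facebook reports that too much data was requested.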
# https://github.com/airbytehq/oncall/issues/2765 fields_exceptions = ["rule"] @@ -101,6 +99,8 @@ class Ads(FBMarketingIncrementalStream): """doc: https://developers.facebook.com/docs/marketing-api/reference/adgroup""" entity_prefix = "ad" + status_field = "effective_status" + valid_statuses = [status.value for status in ValidAdStatuses] def list_objects(self, params: Mapping[str, Any], account_id: str) -> Iterable: return self._api.get_account(account_id=account_id).get_ads(params=params, fields=self.fields()) @@ -110,6 +110,8 @@ class AdSets(FBMarketingIncrementalStream): """doc: https://developers.facebook.com/docs/marketing-api/reference/ad-campaign""" entity_prefix = "adset" + status_field = "effective_status" + valid_statuses = [status.value for status in ValidAdSetStatuses] def list_objects(self, params: Mapping[str, Any], account_id: str) -> Iterable: return self._api.get_account(account_id=account_id).get_ad_sets(params=params, fields=self.fields()) @@ -119,6 +121,8 @@ class Campaigns(FBMarketingIncrementalStream): """doc: https://developers.facebook.com/docs/marketing-api/reference/ad-campaign-group""" entity_prefix = "campaign" + status_field = "effective_status" + valid_statuses = [status.value for status in ValidCampaignStatuses] def list_objects(self, params: Mapping[str, Any], account_id: str) -> Iterable: return self._api.get_account(account_id=account_id).get_campaigns(params=params, fields=self.fields()) @@ -153,9 +157,11 @@ def _state_filter(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]: # if start_date is not specified then do not use date filters return {} - potentially_new_records_in_the_past = self._include_deleted and not stream_state.get("include_deleted", False) + potentially_new_records_in_the_past = self._filter_statuses and ( + set(self._filter_statuses) - set(stream_state.get("filter_statuses", [])) + ) if potentially_new_records_in_the_past: - self.logger.info(f"Ignoring bookmark for {self.name} because of enabled `include_deleted` option") + self.logger.info(f"Ignoring bookmark for {self.name} because `filter_statuses` were changed.") if self._start_date: since = self._start_date else: @@ -187,7 +193,6 @@ class AdAccount(FBMarketingStream): """See: https://developers.facebook.com/docs/marketing-api/reference/ad-account""" use_batch = False - enable_deleted = False def __init__(self, **kwargs): super().__init__(**kwargs) @@ -227,7 +232,6 @@ def list_objects(self, params: Mapping[str, Any], account_id: str) -> Iterable: """noop in case of AdAccount""" fields = self.fields(account_id=account_id) try: - print(f"{self._api.get_account(account_id=account_id).get_id()=} {account_id=}") return [FBAdAccount(self._api.get_account(account_id=account_id).get_id()).api_get(fields=fields)] except FacebookRequestError as e: # This is a workaround for cases when an account seems to have all the required permissions diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py index a7574ce206f9..7c0d34ae8139 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/conftest.py @@ -23,12 +23,19 @@ def account_id_fixture(): @fixture(scope="session", name="some_config") def some_config_fixture(account_id): - return {"start_date": "2021-01-23T00:00:00Z", "account_ids": [f"{account_id}"], "access_token": "unknown_token"} + return { + 
"start_date": "2021-01-23T00:00:00Z", + "account_ids": [f"{account_id}"], + "access_token": "unknown_token", + } @fixture(autouse=True) def mock_default_sleep_interval(mocker): - mocker.patch("source_facebook_marketing.streams.common.DEFAULT_SLEEP_INTERVAL", return_value=pendulum.duration(seconds=5)) + mocker.patch( + "source_facebook_marketing.streams.common.DEFAULT_SLEEP_INTERVAL", + return_value=pendulum.duration(seconds=5), + ) @fixture(name="fb_account_response") @@ -41,7 +48,12 @@ def fb_account_response_fixture(account_id): "id": f"act_{account_id}", } ], - "paging": {"cursors": {"before": "MjM4NDYzMDYyMTcyNTAwNzEZD", "after": "MjM4NDYzMDYyMTcyNTAwNzEZD"}}, + "paging": { + "cursors": { + "before": "MjM4NDYzMDYyMTcyNTAwNzEZD", + "after": "MjM4NDYzMDYyMTcyNTAwNzEZD", + } + }, }, "status_code": 200, } @@ -51,8 +63,15 @@ def fb_account_response_fixture(account_id): def api_fixture(some_config, requests_mock, fb_account_response): api = API(access_token=some_config["access_token"], page_size=100) - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/me/adaccounts", [fb_account_response]) requests_mock.register_uri( - "GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{some_config['account_ids'][0]}/", [fb_account_response] + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/me/adaccounts", + [fb_account_response], + ) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + + f"/{FB_API_VERSION}/act_{some_config['account_ids'][0]}/", + [fb_account_response], ) return api diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_api.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_api.py index 29b2ccbfaaff..d8aae90765b7 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_api.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_api.py @@ -14,7 +14,9 @@ class TestMyFacebookAdsApi: @pytest.fixture def fb_api(self): - return source_facebook_marketing.api.MyFacebookAdsApi.init(access_token="foo", crash_log=False) + return source_facebook_marketing.api.MyFacebookAdsApi.init( + access_token="foo", crash_log=False + ) @pytest.mark.parametrize( "max_rate,max_pause_interval,min_pause_interval,usage,pause_interval,expected_pause_interval", @@ -46,7 +48,15 @@ def fb_api(self): ], ) def test__compute_pause_interval( - self, mocker, fb_api, max_rate, max_pause_interval, min_pause_interval, usage, pause_interval, expected_pause_interval + self, + mocker, + fb_api, + max_rate, + max_pause_interval, + min_pause_interval, + usage, + pause_interval, + expected_pause_interval, ): mocker.patch.object(fb_api, "MAX_RATE", max_rate) mocker.patch.object(fb_api, "MAX_PAUSE_INTERVAL", max_pause_interval) @@ -59,12 +69,18 @@ def test__compute_pause_interval( [ ( pendulum.duration(minutes=1), # min_pause_interval - [(5, pendulum.duration(minutes=6)), (7, pendulum.duration(minutes=5))], # usages_pause_intervals + [ + (5, pendulum.duration(minutes=6)), + (7, pendulum.duration(minutes=5)), + ], # usages_pause_intervals (7, pendulum.duration(minutes=6)), # expected_output ), ( pendulum.duration(minutes=10), # min_pause_interval - [(5, pendulum.duration(minutes=6)), (7, pendulum.duration(minutes=5))], # usages_pause_intervals + [ + (5, pendulum.duration(minutes=6)), + (7, pendulum.duration(minutes=5)), + ], # usages_pause_intervals (7, pendulum.duration(minutes=10)), # expected_output ), ( @@ -85,19 +101,36 @@ def test__compute_pause_interval( ), ], ) - def 
test__get_max_usage_pause_interval_from_batch(self, mocker, fb_api, min_pause_interval, usages_pause_intervals, expected_output): + def test__get_max_usage_pause_interval_from_batch( + self, + mocker, + fb_api, + min_pause_interval, + usages_pause_intervals, + expected_output, + ): records = [ - {"headers": [{"name": "USAGE", "value": usage}, {"name": "PAUSE_INTERVAL", "value": pause_interval}]} + { + "headers": [ + {"name": "USAGE", "value": usage}, + {"name": "PAUSE_INTERVAL", "value": pause_interval}, + ] + } for usage, pause_interval in usages_pause_intervals ] mock_parse_call_rate_header = mocker.Mock(side_effect=usages_pause_intervals) - mocker.patch.object(fb_api, "_parse_call_rate_header", mock_parse_call_rate_header) + mocker.patch.object( + fb_api, "_parse_call_rate_header", mock_parse_call_rate_header + ) mocker.patch.object(fb_api, "MIN_PAUSE_INTERVAL", min_pause_interval) output = fb_api._get_max_usage_pause_interval_from_batch(records) fb_api._parse_call_rate_header.assert_called_with( - {"usage": usages_pause_intervals[-1][0], "pause_interval": usages_pause_intervals[-1][1]} + { + "usage": usages_pause_intervals[-1][0], + "pause_interval": usages_pause_intervals[-1][1], + } ) assert output == expected_output @@ -112,30 +145,52 @@ def test__get_max_usage_pause_interval_from_batch(self, mocker, fb_api, min_paus (["not_batch"], 2, 1, False), ], ) - def test__handle_call_rate_limit(self, mocker, fb_api, params, min_rate, usage, expect_sleep): + def test__handle_call_rate_limit( + self, mocker, fb_api, params, min_rate, usage, expect_sleep + ): pause_interval = 1 mock_response = mocker.Mock() mocker.patch.object(fb_api, "MIN_RATE", min_rate) - mocker.patch.object(fb_api, "_get_max_usage_pause_interval_from_batch", mocker.Mock(return_value=(usage, pause_interval))) - mocker.patch.object(fb_api, "_parse_call_rate_header", mocker.Mock(return_value=(usage, pause_interval))) + mocker.patch.object( + fb_api, + "_get_max_usage_pause_interval_from_batch", + mocker.Mock(return_value=(usage, pause_interval)), + ) + mocker.patch.object( + fb_api, + "_parse_call_rate_header", + mocker.Mock(return_value=(usage, pause_interval)), + ) mocker.patch.object(fb_api, "_compute_pause_interval") mocker.patch.object(source_facebook_marketing.api, "logger") mocker.patch.object(source_facebook_marketing.api, "sleep") assert fb_api._handle_call_rate_limit(mock_response, params) is None if "batch" in params: - fb_api._get_max_usage_pause_interval_from_batch.assert_called_with(mock_response.json.return_value) + fb_api._get_max_usage_pause_interval_from_batch.assert_called_with( + mock_response.json.return_value + ) else: - fb_api._parse_call_rate_header.assert_called_with(mock_response.headers.return_value) + fb_api._parse_call_rate_header.assert_called_with( + mock_response.headers.return_value + ) if expect_sleep: - fb_api._compute_pause_interval.assert_called_with(usage=usage, pause_interval=pause_interval) - source_facebook_marketing.api.sleep.assert_called_with(fb_api._compute_pause_interval.return_value.total_seconds()) + fb_api._compute_pause_interval.assert_called_with( + usage=usage, pause_interval=pause_interval + ) + source_facebook_marketing.api.sleep.assert_called_with( + fb_api._compute_pause_interval.return_value.total_seconds() + ) source_facebook_marketing.api.logger.warning.assert_called_with( f"Utilization is too high ({usage})%, pausing for {fb_api._compute_pause_interval.return_value}" ) def test_find_account(self, api, account_id, requests_mock): - requests_mock.register_uri("GET", 
FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/", [{"json": {"id": "act_test"}}]) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/", + [{"json": {"id": "act_test"}}], + ) account = api.get_account(account_id) assert isinstance(account, AdAccount) assert account.get_id() == "act_test" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py index a969999ee383..1c4bb0f67c37 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job.py @@ -49,12 +49,27 @@ def job_fixture(api, account): } interval = pendulum.Period(pendulum.Date(2019, 1, 1), pendulum.Date(2019, 1, 1)) - return InsightAsyncJob(edge_object=account, api=api, interval=interval, params=params, job_timeout= pendulum.duration(minutes=60)) + return InsightAsyncJob( + edge_object=account, + api=api, + interval=interval, + params=params, + job_timeout=pendulum.duration(minutes=60), + ) @pytest.fixture(name="grouped_jobs") def grouped_jobs_fixture(mocker): - return [mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False, elapsed_time=None) for _ in range(10)] + return [ + mocker.Mock( + spec=InsightAsyncJob, + attempt_number=1, + failed=False, + completed=False, + elapsed_time=None, + ) + for _ in range(10) + ] @pytest.fixture(name="parent_job") @@ -170,7 +185,10 @@ def test_start(self, job): def test_start_already_started(self, job): job.start() - with pytest.raises(RuntimeError, match=r": Incorrect usage of start - the job already started, use restart instead"): + with pytest.raises( + RuntimeError, + match=r": Incorrect usage of start - the job already started, use restart instead", + ): job.start() def test_restart(self, failed_job, api, adreport): @@ -185,15 +203,24 @@ def test_restart_when_job_not_failed(self, job, api): job.start() assert not job.failed - with pytest.raises(RuntimeError, match=r": Incorrect usage of restart - only failed jobs can be restarted"): + with pytest.raises( + RuntimeError, + match=r": Incorrect usage of restart - only failed jobs can be restarted", + ): job.restart() def test_restart_when_job_not_started(self, job): - with pytest.raises(RuntimeError, match=r": Incorrect usage of restart - only failed jobs can be restarted"): + with pytest.raises( + RuntimeError, + match=r": Incorrect usage of restart - only failed jobs can be restarted", + ): job.restart() def test_update_job_not_started(self, job): - with pytest.raises(RuntimeError, match=r": Incorrect usage of the method - the job is not started"): + with pytest.raises( + RuntimeError, + match=r": Incorrect usage of the method - the job is not started", + ): job.update_job() def test_update_job_on_completed_job(self, completed_job, adreport): @@ -240,7 +267,9 @@ def test_update_job_with_batch(self, started_job, adreport, mocker): kwargs["failure"](response) def test_elapsed_time(self, job, api, adreport): - assert job.elapsed_time is None, "should be None for the job that is not started" + assert ( + job.elapsed_time is None + ), "should be None for the job that is not started" job.start() adreport["async_status"] = Status.COMPLETED.value @@ -285,10 +314,13 @@ def test_str(self, api, account): api=api, params={"breakdowns": [10, 20]}, interval=interval, - job_timeout=pendulum.duration(minutes=60) + 
job_timeout=pendulum.duration(minutes=60), ) - assert str(job) == f"InsightAsyncJob(id=, {account}, time_range= 2011-01-01]>, breakdowns=[10, 20])" + assert ( + str(job) + == f"InsightAsyncJob(id=, {account}, time_range= 2011-01-01]>, breakdowns=[10, 20])" + ) def test_get_result(self, job, adreport, api): job.start() @@ -309,17 +341,26 @@ def test_get_result_retried(self, mocker, job, api): ads_insights._set_data({"items": [{"some_data": 123}, {"some_data": 77}]}) with mocker.patch( "facebook_business.adobjects.objectparser.ObjectParser.parse_multiple", - side_effect=[FacebookBadObjectError("Bad data to set object data"), ads_insights], + side_effect=[ + FacebookBadObjectError("Bad data to set object data"), + ads_insights, + ], ): # in case this is not retried, an error will be raised job.get_result() def test_get_result_when_job_is_not_started(self, job): - with pytest.raises(RuntimeError, match=r"Incorrect usage of get_result - the job is not started or failed"): + with pytest.raises( + RuntimeError, + match=r"Incorrect usage of get_result - the job is not started or failed", + ): job.get_result() def test_get_result_when_job_is_failed(self, failed_job): - with pytest.raises(RuntimeError, match=r"Incorrect usage of get_result - the job is not started or failed"): + with pytest.raises( + RuntimeError, + match=r"Incorrect usage of get_result - the job is not started or failed", + ): failed_job.get_result() @pytest.mark.parametrize( @@ -334,10 +375,22 @@ def test_get_result_when_job_is_failed(self, failed_job): def test_split_job(self, mocker, api, edge_class, next_edge_class, id_field): """Test that split will correctly downsize edge_object""" today = pendulum.today().date() - start, end = today - pendulum.duration(days=365 * 3 + 20), today - pendulum.duration(days=365 * 3 + 10) + start, end = today - pendulum.duration( + days=365 * 3 + 20 + ), today - pendulum.duration(days=365 * 3 + 10) params = {"time_increment": 1, "breakdowns": []} - job = InsightAsyncJob(api=api, edge_object=edge_class(1), interval=pendulum.Period(start, end), params=params, job_timeout=pendulum.duration(minutes=60)) - mocker.patch.object(edge_class, "get_insights", return_value=[{id_field: 1}, {id_field: 2}, {id_field: 3}]) + job = InsightAsyncJob( + api=api, + edge_object=edge_class(1), + interval=pendulum.Period(start, end), + params=params, + job_timeout=pendulum.duration(minutes=60), + ) + mocker.patch.object( + edge_class, + "get_insights", + return_value=[{id_field: 1}, {id_field: 2}, {id_field: 3}], + ) small_jobs = job.split_job() @@ -351,7 +404,9 @@ def test_split_job(self, mocker, api, edge_class, next_edge_class, id_field): # with the one 37 months ago, that's why current date is frozen. # For a different date the since date would be also different. 
# See facebook_marketing.utils.validate_start_date for reference - "since": (today - pendulum.duration(months=37) + pendulum.duration(days=1)).to_date_string(), + "since": ( + today - pendulum.duration(months=37) + pendulum.duration(days=1) + ).to_date_string(), "until": end.to_date_string(), }, } @@ -360,15 +415,28 @@ def test_split_job(self, mocker, api, edge_class, next_edge_class, id_field): assert all(j.interval == job.interval for j in small_jobs) for i, small_job in enumerate(small_jobs, start=1): assert small_job._params["time_range"] == job._params["time_range"] - assert str(small_job) == f"InsightAsyncJob(id=, {next_edge_class(i)}, time_range={job.interval}, breakdowns={[]})" + assert ( + str(small_job) + == f"InsightAsyncJob(id=, {next_edge_class(i)}, time_range={job.interval}, breakdowns={[]})" + ) def test_split_job_smallest(self, mocker, api): """Test that split will correctly downsize edge_object""" - interval = pendulum.Period(pendulum.Date(2010, 1, 1), pendulum.Date(2010, 1, 10)) + interval = pendulum.Period( + pendulum.Date(2010, 1, 1), pendulum.Date(2010, 1, 10) + ) params = {"time_increment": 1, "breakdowns": []} - job = InsightAsyncJob(api=api, edge_object=Ad(1), interval=interval, params=params, job_timeout=pendulum.duration(minutes=60)) + job = InsightAsyncJob( + api=api, + edge_object=Ad(1), + interval=interval, + params=params, + job_timeout=pendulum.duration(minutes=60), + ) - with pytest.raises(ValueError, match="The job is already splitted to the smallest size."): + with pytest.raises( + ValueError, match="The job is already splitted to the smallest size." + ): job.split_job() @@ -430,7 +498,10 @@ def test_get_result(self, parent_job, grouped_jobs): def test_split_job(self, parent_job, grouped_jobs, mocker): grouped_jobs[0].failed = True - grouped_jobs[0].split_job.return_value = [mocker.Mock(spec=InsightAsyncJob), mocker.Mock(spec=InsightAsyncJob)] + grouped_jobs[0].split_job.return_value = [ + mocker.Mock(spec=InsightAsyncJob), + mocker.Mock(spec=InsightAsyncJob), + ] grouped_jobs[5].failed = True grouped_jobs[5].split_job.return_value = [ mocker.Mock(spec=InsightAsyncJob), @@ -440,7 +511,9 @@ def test_split_job(self, parent_job, grouped_jobs, mocker): small_jobs = parent_job.split_job() - assert len(small_jobs) == len(grouped_jobs) + 5 - 2, "each failed job must be replaced with its split" + assert ( + len(small_jobs) == len(grouped_jobs) + 5 - 2 + ), "each failed job must be replaced with its split" for i, job in enumerate(grouped_jobs): if i in (0, 5): job.split_job.assert_called_once() @@ -462,4 +535,7 @@ def test_split_job_smallest(self, parent_job, grouped_jobs): count += 1 def test_str(self, parent_job, grouped_jobs): - assert str(parent_job) == f"ParentAsyncJob({grouped_jobs[0]} ... {len(grouped_jobs) - 1} jobs more)" + assert ( + str(parent_job) + == f"ParentAsyncJob({grouped_jobs[0]} ... 
{len(grouped_jobs) - 1} jobs more)" + ) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py index cb0cffffeabb..77d38e96a19f 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_async_job_manager.py @@ -25,13 +25,17 @@ def time_mock_fixture(mocker): @pytest.fixture(name="update_job_mock") def update_job_mock_fixture(mocker): - return mocker.patch("source_facebook_marketing.streams.async_job_manager.update_in_batch") + return mocker.patch( + "source_facebook_marketing.streams.async_job_manager.update_in_batch" + ) class TestInsightAsyncManager: def test_jobs_empty(self, api, some_config): """Should work event without jobs""" - manager = InsightAsyncJobManager(api=api, jobs=[], account_id=some_config["account_ids"][0]) + manager = InsightAsyncJobManager( + api=api, jobs=[], account_id=some_config["account_ids"][0] + ) jobs = list(manager.completed_jobs()) assert not jobs @@ -41,7 +45,9 @@ def test_jobs_completed_immediately(self, api, mocker, time_mock, some_config): mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False), mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False), ] - manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) + manager = InsightAsyncJobManager( + api=api, jobs=jobs, account_id=some_config["account_ids"][0] + ) completed_jobs = list(manager.completed_jobs()) assert jobs == completed_jobs time_mock.sleep.assert_not_called() @@ -58,10 +64,16 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() jobs = [ - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False + ), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False + ), ] - manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) + manager = InsightAsyncJobManager( + api=api, jobs=jobs, account_id=some_config["account_ids"][0] + ) job = next(manager.completed_jobs(), None) assert job == jobs[1] @@ -69,7 +81,9 @@ def update_job_behaviour(): job = next(manager.completed_jobs(), None) assert job == jobs[0] - time_mock.sleep.assert_called_with(InsightAsyncJobManager.JOB_STATUS_UPDATE_SLEEP_SECONDS) + time_mock.sleep.assert_called_with( + InsightAsyncJobManager.JOB_STATUS_UPDATE_SLEEP_SECONDS + ) job = next(manager.completed_jobs(), None) assert job is None @@ -86,10 +100,16 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() jobs = [ - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True + ), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False + ), ] - manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) + manager = InsightAsyncJobManager( + api=api, jobs=jobs, account_id=some_config["account_ids"][0] + ) job = next(manager.completed_jobs(), None) assert job == jobs[0] @@ 
-111,17 +131,27 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() jobs = [ - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True + ), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False + ), ] sub_jobs = [ - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True + ), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True + ), ] sub_jobs[0].get_result.return_value = [1, 2] sub_jobs[1].get_result.return_value = [3, 4] jobs[1].split_job.return_value = sub_jobs - manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) + manager = InsightAsyncJobManager( + api=api, jobs=jobs, account_id=some_config["account_ids"][0] + ) job = next(manager.completed_jobs(), None) assert job == jobs[0] @@ -134,7 +164,9 @@ def update_job_behaviour(): job = next(manager.completed_jobs(), None) assert job is None - def test_job_failed_too_many_times(self, api, mocker, time_mock, update_job_mock, some_config): + def test_job_failed_too_many_times( + self, api, mocker, time_mock, update_job_mock, some_config + ): """Manager should fail when job failed too many times""" def update_job_behaviour(): @@ -144,15 +176,26 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() jobs = [ - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True + ), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False + ), ] - manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) - - with pytest.raises(JobException, match=f"{jobs[1]}: failed more than {InsightAsyncJobManager.MAX_NUMBER_OF_ATTEMPTS} times."): + manager = InsightAsyncJobManager( + api=api, jobs=jobs, account_id=some_config["account_ids"][0] + ) + + with pytest.raises( + JobException, + match=f"{jobs[1]}: failed more than {InsightAsyncJobManager.MAX_NUMBER_OF_ATTEMPTS} times.", + ): next(manager.completed_jobs(), None) - def test_nested_job_failed_too_many_times(self, api, mocker, time_mock, update_job_mock, some_config): + def test_nested_job_failed_too_many_times( + self, api, mocker, time_mock, update_job_mock, some_config + ): """Manager should fail when a nested job within a ParentAsyncJob failed too many times""" def update_job_behaviour(): @@ -163,14 +206,28 @@ def update_job_behaviour(): update_job_mock.side_effect = update_job_behaviour() sub_jobs = [ - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True + ), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=False + ), ] jobs = [ - mocker.Mock(spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True), - mocker.Mock(spec=ParentAsyncJob, _jobs=sub_jobs, 
attempt_number=1, failed=False, completed=False), + mocker.Mock( + spec=InsightAsyncJob, attempt_number=1, failed=False, completed=True + ), + mocker.Mock( + spec=ParentAsyncJob, + _jobs=sub_jobs, + attempt_number=1, + failed=False, + completed=False, + ), ] - manager = InsightAsyncJobManager(api=api, jobs=jobs, account_id=some_config["account_ids"][0]) + manager = InsightAsyncJobManager( + api=api, jobs=jobs, account_id=some_config["account_ids"][0] + ) with pytest.raises(JobException): next(manager.completed_jobs(), None) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py index 3d6ef2aaa5e6..612ed22ef25a 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_insight_streams.py @@ -36,14 +36,18 @@ def start_date_fixture(): @pytest.fixture(name="async_manager_mock") def async_manager_mock_fixture(mocker): - mock = mocker.patch("source_facebook_marketing.streams.base_insight_streams.InsightAsyncJobManager") + mock = mocker.patch( + "source_facebook_marketing.streams.base_insight_streams.InsightAsyncJobManager" + ) mock.return_value = mock return mock @pytest.fixture(name="async_job_mock") def async_job_mock_fixture(mocker): - mock = mocker.patch("source_facebook_marketing.streams.base_insight_streams.InsightAsyncJob") + mock = mocker.patch( + "source_facebook_marketing.streams.base_insight_streams.InsightAsyncJob" + ) mock.side_effect = lambda api, **kwargs: {"api": api, **kwargs} @@ -58,7 +62,11 @@ def test_init(self, api, some_config): ) assert not stream.breakdowns - assert stream.action_breakdowns == ["action_type", "action_target_id", "action_destination"] + assert stream.action_breakdowns == [ + "action_type", + "action_target_id", + "action_destination", + ] assert stream.name == "ads_insights" assert stream.primary_key == ["date_start", "account_id", "ad_id"] assert stream.action_report_time == "mixed" @@ -78,7 +86,13 @@ def test_init_override(self, api, some_config): assert stream.breakdowns == ["test1", "test2"] assert stream.action_breakdowns == ["field1", "field2"] assert stream.name == "custom_name" - assert stream.primary_key == ["date_start", "account_id", "ad_id", "test1", "test2"] + assert stream.primary_key == [ + "date_start", + "account_id", + "ad_id", + "test1", + "test2", + ] def test_read_records_all(self, mocker, api, some_config): """1. 
yield all from mock @@ -87,7 +101,9 @@ def test_read_records_all(self, mocker, api, some_config): """ job = mocker.Mock(spec=InsightAsyncJob) job.get_result.return_value = [mocker.Mock(), mocker.Mock(), mocker.Mock()] - job.interval = pendulum.Period(pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1)) + job.interval = pendulum.Period( + pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1) + ) stream = AdsInsights( api=api, account_ids=some_config["account_ids"], @@ -99,7 +115,10 @@ def test_read_records_all(self, mocker, api, some_config): records = list( stream.read_records( sync_mode=SyncMode.incremental, - stream_slice={"insight_job": job, "account_id": some_config["account_ids"][0]}, + stream_slice={ + "insight_job": job, + "account_id": some_config["account_ids"][0], + }, ) ) @@ -112,7 +131,9 @@ def test_read_records_random_order(self, mocker, api, some_config): """ job = mocker.Mock(spec=AsyncJob) job.get_result.return_value = [mocker.Mock(), mocker.Mock(), mocker.Mock()] - job.interval = pendulum.Period(pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1)) + job.interval = pendulum.Period( + pendulum.date(2010, 1, 1), pendulum.date(2010, 1, 1) + ) stream = AdsInsights( api=api, account_ids=some_config["account_ids"], @@ -124,7 +145,10 @@ def test_read_records_random_order(self, mocker, api, some_config): records = list( stream.read_records( sync_mode=SyncMode.incremental, - stream_slice={"insight_job": job, "account_id": some_config["account_ids"][0]}, + stream_slice={ + "insight_job": job, + "account_id": some_config["account_ids"][0], + }, ) ) @@ -225,26 +249,39 @@ def test_state(self, api, state, result_state, some_config): insights_lookback_window=28, ) - assert stream.state == {"time_increment": 1, "unknown_account": {"slices": set()}} + assert stream.state == { + "time_increment": 1, + "unknown_account": {"slices": set()}, + } stream.state = state actual_state = stream.state result_state = state if not result_state else result_state - result_state[some_config["account_ids"][0]]["slices"] = result_state[some_config["account_ids"][0]].get("slices", set()) + result_state[some_config["account_ids"][0]]["slices"] = result_state[ + some_config["account_ids"][0] + ].get("slices", set()) result_state["time_increment"] = 1 assert actual_state == result_state - def test_stream_slices_no_state(self, api, async_manager_mock, start_date, some_config): + def test_stream_slices_no_state( + self, api, async_manager_mock, start_date, some_config + ): """Stream will use start_date when there is not state""" end_date = start_date + duration(weeks=2) stream = AdsInsights( - api=api, account_ids=some_config["account_ids"], start_date=start_date, end_date=end_date, insights_lookback_window=28 + api=api, + account_ids=some_config["account_ids"], + start_date=start_date, + end_date=end_date, + insights_lookback_window=28, ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = list(stream.stream_slices(stream_state=None, sync_mode=SyncMode.incremental)) + slices = list( + stream.stream_slices(stream_state=None, sync_mode=SyncMode.incremental) + ) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -258,16 +295,24 @@ def test_stream_slices_no_state(self, api, async_manager_mock, start_date, some_ assert generated_jobs[0].interval.start == start_date.date() assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) - def test_stream_slices_no_state_close_to_now(self, api, async_manager_mock, recent_start_date, some_config): + def 
test_stream_slices_no_state_close_to_now( + self, api, async_manager_mock, recent_start_date, some_config + ): """Stream will use start_date when there is not state and start_date within 28d from now""" start_date = recent_start_date end_date = pendulum.now() stream = AdsInsights( - api=api, account_ids=some_config["account_ids"], start_date=start_date, end_date=end_date, insights_lookback_window=28 + api=api, + account_ids=some_config["account_ids"], + start_date=start_date, + end_date=end_date, + insights_lookback_window=28, ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = list(stream.stream_slices(stream_state=None, sync_mode=SyncMode.incremental)) + slices = list( + stream.stream_slices(stream_state=None, sync_mode=SyncMode.incremental) + ) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -281,17 +326,25 @@ def test_stream_slices_no_state_close_to_now(self, api, async_manager_mock, rece assert generated_jobs[0].interval.start == start_date.date() assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) - def test_stream_slices_with_state(self, api, async_manager_mock, start_date, some_config): + def test_stream_slices_with_state( + self, api, async_manager_mock, start_date, some_config + ): """Stream will use cursor_value from state when there is state""" end_date = start_date + duration(days=10) cursor_value = start_date + duration(days=5) state = {AdsInsights.cursor_field: cursor_value.date().isoformat()} stream = AdsInsights( - api=api, account_ids=some_config["account_ids"], start_date=start_date, end_date=end_date, insights_lookback_window=28 + api=api, + account_ids=some_config["account_ids"], + start_date=start_date, + end_date=end_date, + insights_lookback_window=28, ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = list(stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental)) + slices = list( + stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental) + ) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -302,21 +355,33 @@ def test_stream_slices_with_state(self, api, async_manager_mock, start_date, som args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) assert len(generated_jobs) == (end_date - cursor_value).days - assert generated_jobs[0].interval.start == cursor_value.date() + duration(days=1) - assert generated_jobs[1].interval.start == cursor_value.date() + duration(days=2) + assert generated_jobs[0].interval.start == cursor_value.date() + duration( + days=1 + ) + assert generated_jobs[1].interval.start == cursor_value.date() + duration( + days=2 + ) - def test_stream_slices_with_state_close_to_now(self, api, async_manager_mock, recent_start_date, some_config): + def test_stream_slices_with_state_close_to_now( + self, api, async_manager_mock, recent_start_date, some_config + ): """Stream will use start_date when close to now and start_date close to now""" start_date = recent_start_date end_date = pendulum.now() cursor_value = end_date - duration(days=1) state = {AdsInsights.cursor_field: cursor_value.date().isoformat()} stream = AdsInsights( - api=api, account_ids=some_config["account_ids"], start_date=start_date, end_date=end_date, insights_lookback_window=28 + api=api, + account_ids=some_config["account_ids"], + start_date=start_date, + end_date=end_date, + insights_lookback_window=28, ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = 
list(stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental)) + slices = list( + stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental) + ) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -331,7 +396,9 @@ def test_stream_slices_with_state_close_to_now(self, api, async_manager_mock, re assert generated_jobs[1].interval.start == start_date.date() + duration(days=1) @pytest.mark.parametrize("state_format", ["old_format", "new_format"]) - def test_stream_slices_with_state_and_slices(self, api, async_manager_mock, start_date, some_config, state_format): + def test_stream_slices_with_state_and_slices( + self, api, async_manager_mock, start_date, some_config, state_format + ): """Stream will use cursor_value from state, but will skip saved slices""" end_date = start_date + duration(days=10) cursor_value = start_date + duration(days=5) @@ -339,21 +406,33 @@ def test_stream_slices_with_state_and_slices(self, api, async_manager_mock, star if state_format == "old_format": state = { AdsInsights.cursor_field: cursor_value.date().isoformat(), - "slices": [(cursor_value + duration(days=1)).date().isoformat(), (cursor_value + duration(days=3)).date().isoformat()], + "slices": [ + (cursor_value + duration(days=1)).date().isoformat(), + (cursor_value + duration(days=3)).date().isoformat(), + ], } else: state = { "unknown_account": { AdsInsights.cursor_field: cursor_value.date().isoformat(), - "slices": [(cursor_value + duration(days=1)).date().isoformat(), (cursor_value + duration(days=3)).date().isoformat()], + "slices": [ + (cursor_value + duration(days=1)).date().isoformat(), + (cursor_value + duration(days=3)).date().isoformat(), + ], } } stream = AdsInsights( - api=api, account_ids=some_config["account_ids"], start_date=start_date, end_date=end_date, insights_lookback_window=28 + api=api, + account_ids=some_config["account_ids"], + start_date=start_date, + end_date=end_date, + insights_lookback_window=28, ) async_manager_mock.completed_jobs.return_value = [1, 2, 3] - slices = list(stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental)) + slices = list( + stream.stream_slices(stream_state=state, sync_mode=SyncMode.incremental) + ) assert slices == [ {"account_id": "unknown_account", "insight_job": 1}, @@ -363,9 +442,15 @@ def test_stream_slices_with_state_and_slices(self, api, async_manager_mock, star async_manager_mock.assert_called_once() args, kwargs = async_manager_mock.call_args generated_jobs = list(kwargs["jobs"]) - assert len(generated_jobs) == (end_date - cursor_value).days - 2, "should be 2 slices short because of state" - assert generated_jobs[0].interval.start == cursor_value.date() + duration(days=2) - assert generated_jobs[1].interval.start == cursor_value.date() + duration(days=4) + assert ( + len(generated_jobs) == (end_date - cursor_value).days - 2 + ), "should be 2 slices short because of state" + assert generated_jobs[0].interval.start == cursor_value.date() + duration( + days=2 + ) + assert generated_jobs[1].interval.start == cursor_value.date() + duration( + days=4 + ) def test_get_json_schema(self, api, some_config): stream = AdsInsights( @@ -380,7 +465,9 @@ def test_get_json_schema(self, api, some_config): assert "device_platform" not in schema["properties"] assert "country" not in schema["properties"] - assert not (set(stream.fields()) - set(schema["properties"].keys())), "all fields present in schema" + assert not ( + set(stream.fields()) - set(schema["properties"].keys()) + ), "all fields present in 
schema" def test_get_json_schema_custom(self, api, some_config): stream = AdsInsights( @@ -396,7 +483,9 @@ def test_get_json_schema_custom(self, api, some_config): assert "device_platform" in schema["properties"] assert "country" in schema["properties"] - assert not (set(stream.fields()) - set(schema["properties"].keys())), "all fields present in schema" + assert not ( + set(stream.fields()) - set(schema["properties"].keys()) + ), "all fields present in schema" def test_fields(self, api, some_config): stream = AdsInsights( @@ -425,7 +514,15 @@ def test_fields_custom(self, api, some_config): assert stream.fields() == ["account_id", "account_currency"] schema = stream.get_json_schema() - assert schema["properties"].keys() == set(["account_currency", "account_id", stream.cursor_field, "date_stop", "ad_id"]) + assert schema["properties"].keys() == set( + [ + "account_currency", + "account_id", + stream.cursor_field, + "date_stop", + "ad_id", + ] + ) def test_level_custom(self, api, some_config): stream = AdsInsights( diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_streams.py index 66604660645f..4ddd72eab91d 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_base_streams.py @@ -14,7 +14,11 @@ @pytest.fixture(name="mock_batch_responses") def mock_batch_responses_fixture(requests_mock): - return partial(requests_mock.register_uri, "POST", f"{FacebookSession.GRAPH}/{FacebookAdsApi.API_VERSION}/") + return partial( + requests_mock.register_uri, + "POST", + f"{FacebookSession.GRAPH}/{FacebookAdsApi.API_VERSION}/", + ) @pytest.fixture(name="batch") @@ -100,6 +104,7 @@ def test_date_time_value(self): class ConcreteFBMarketingIncrementalStream(FBMarketingIncrementalStream): cursor_field = "date" + valid_statuses = ["ACTIVE", "PAUSED", "DELETED"] def list_objects(self, **kwargs): return [] @@ -107,20 +112,32 @@ def list_objects(self, **kwargs): @pytest.fixture def incremental_class_instance(api): - return ConcreteFBMarketingIncrementalStream(api=api, account_ids=["123", "456", "789"], start_date=None, end_date=None) + return ConcreteFBMarketingIncrementalStream( + api=api, account_ids=["123", "456", "789"], start_date=None, end_date=None + ) class TestFBMarketingIncrementalStreamSliceAndState: - def test_stream_slices_multiple_accounts_with_state(self, incremental_class_instance): - stream_state = {"123": {"state_key": "state_value"}, "456": {"state_key": "another_state_value"}} + def test_stream_slices_multiple_accounts_with_state( + self, incremental_class_instance + ): + stream_state = { + "123": {"state_key": "state_value"}, + "456": {"state_key": "another_state_value"}, + } expected_slices = [ {"account_id": "123", "stream_state": {"state_key": "state_value"}}, {"account_id": "456", "stream_state": {"state_key": "another_state_value"}}, {"account_id": "789", "stream_state": {}}, ] - assert list(incremental_class_instance.stream_slices(stream_state)) == expected_slices + assert ( + list(incremental_class_instance.stream_slices(stream_state)) + == expected_slices + ) - def test_stream_slices_multiple_accounts_empty_state(self, incremental_class_instance): + def test_stream_slices_multiple_accounts_empty_state( + self, incremental_class_instance + ): expected_slices = [ {"account_id": "123", "stream_state": {}}, {"account_id": "456", 
"stream_state": {}}, @@ -132,18 +149,128 @@ def test_stream_slices_single_account_with_state(self, incremental_class_instanc incremental_class_instance._account_ids = ["123"] stream_state = {"state_key": "state_value"} expected_slices = [{"account_id": "123", "stream_state": stream_state}] - assert list(incremental_class_instance.stream_slices(stream_state)) == expected_slices + assert ( + list(incremental_class_instance.stream_slices(stream_state)) + == expected_slices + ) def test_stream_slices_single_account_empty_state(self, incremental_class_instance): incremental_class_instance._account_ids = ["123"] expected_slices = [{"account_id": "123", "stream_state": None}] assert list(incremental_class_instance.stream_slices()) == expected_slices - def test_get_updated_state(self, incremental_class_instance): - current_stream_state = {"123": {"date": "2021-01-15T00:00:00+00:00"}, "include_deleted": False} - latest_record = {"account_id": "123", "date": "2021-01-20T00:00:00+00:00"} - - expected_state = {"123": {"date": "2021-01-20T00:00:00+00:00", "include_deleted": False}, "include_deleted": False} + @pytest.mark.parametrize( + "current_stream_state, latest_record, expected_state, instance_filter_statuses", + [ + # Test case 1: State date is used because fewer filters are used + ( + {"123": {"date": "2021-01-30T00:00:00+00:00", "include_deleted": True}, "include_deleted": True}, + {"account_id": "123", "date": "2021-01-20T00:00:00+00:00"}, + { + "123": { + "date": "2021-01-30T00:00:00+00:00", + "filter_statuses": ["ACTIVE"], + "include_deleted": True, + }, + "include_deleted": True, + }, + ["ACTIVE"], + ), + # Test case 2: State date is used because filter_statuses is the same as include_deleted + ( + {"123": {"date": "2021-01-30T00:00:00+00:00", "include_deleted": True}, "include_deleted": True}, + {"account_id": "123", "date": "2021-01-20T00:00:00+00:00"}, + { + "123": { + "date": "2021-01-30T00:00:00+00:00", + "filter_statuses": ["ACTIVE", "PAUSED", "DELETED"], + "include_deleted": True, + }, + "include_deleted": True, + }, + ["ACTIVE", "PAUSED", "DELETED"], + ), + # Test case 3: State date is used because filter_statuses is the same as include_deleted + ( + { + "123": { + "date": "2023-02-15T00:00:00+00:00", + "include_deleted": False, + } + }, + {"account_id": "123", "date": "2021-01-20T00:00:00+00:00"}, + { + "123": { + "date": "2023-02-15T00:00:00+00:00", + "filter_statuses": [], + "include_deleted": False, + } + }, + [], + ), + # Test case 4: State date is ignored because there are more filters in the new config + ( + { + "123": { + "date": "2023-02-15T00:00:00+00:00", + "include_deleted": False, + } + }, + {"account_id": "123", "date": "2021-01-20T00:00:00+00:00"}, + { + "123": { + "date": "2021-01-20T00:00:00+00:00", + "filter_statuses": ["ACTIVE", "PAUSED"], + "include_deleted": False, + } + }, + ["ACTIVE", "PAUSED"], + ), + # Test case 5: Mismatching filter_statuses with include_deleted false + ( + { + "123": { + "date": "2023-02-15T00:00:00+00:00", + "filter_statuses": ["PAUSED"], + "include_deleted": False, + } + }, + {"account_id": "123", "date": "2021-01-20T00:00:00+00:00"}, + { + "123": { + "date": "2021-01-20T00:00:00+00:00", + "filter_statuses": ["ACTIVE"], + "include_deleted": False, + } + }, + ["ACTIVE"], + ), + # Test case 6: No filter_statuses or include_deleted in state, instance has filter_statuses + ( + {"123": {"date": "2023-02-15T00:00:00+00:00"}}, + {"account_id": "123", "date": "2021-01-20T00:00:00+00:00"}, + { + "123": { + "date": "2021-01-20T00:00:00+00:00", + 
"filter_statuses": ["ACTIVE"], + } + }, + ["ACTIVE"], + ), + ], + ) + def test_get_updated_state( + self, + incremental_class_instance, + current_stream_state, + latest_record, + expected_state, + instance_filter_statuses, + ): + # Set the instance's filter_statuses + incremental_class_instance._filter_statuses = instance_filter_statuses - new_state = incremental_class_instance.get_updated_state(current_stream_state, latest_record) + new_state = incremental_class_instance.get_updated_state( + current_stream_state, latest_record + ) assert new_state == expected_state diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_client.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_client.py index 0d862aab6f31..5395bab12a5c 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_client.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_client.py @@ -28,7 +28,11 @@ def fb_call_rate_response_fixture(): "fbtrace_id": "this_is_fake_response", } - headers = {"x-app-usage": json.dumps({"call_count": 28, "total_time": 25, "total_cputime": 25})} + headers = { + "x-app-usage": json.dumps( + {"call_count": 28, "total_time": 25, "total_cputime": 25} + ) + } return { "json": { @@ -41,7 +45,10 @@ def fb_call_rate_response_fixture(): @pytest.fixture(name="fb_call_amount_data_response") def fb_call_amount_data_response_fixture(): - error = {"message": "Please reduce the amount of data you're asking for, then retry your request", "code": 1} + error = { + "message": "Please reduce the amount of data you're asking for, then retry your request", + "code": 1, + } return { "json": { @@ -52,29 +59,61 @@ def fb_call_amount_data_response_fixture(): class TestBackoff: - def test_limit_reached(self, mocker, requests_mock, api, fb_call_rate_response, account_id, some_config): + def test_limit_reached( + self, mocker, requests_mock, api, fb_call_rate_response, account_id, some_config + ): """Error once, check that we retry and not fail""" # turn Campaigns into non batch mode to test non batch logic campaign_responses = [ fb_call_rate_response, { - "json": {"data": [{"id": 1, "updated_time": "2020-09-25T00:00:00Z"}, {"id": 2, "updated_time": "2020-09-25T00:00:00Z"}]}, + "json": { + "data": [ + {"id": 1, "updated_time": "2020-09-25T00:00:00Z"}, + {"id": 2, "updated_time": "2020-09-25T00:00:00Z"}, + ] + }, "status_code": 200, }, ] - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/campaigns", campaign_responses) - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/1/", [{"status_code": 200}]) - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/2/", [{"status_code": 200}]) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/campaigns", + campaign_responses, + ) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/1/", + [{"status_code": 200}], + ) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/2/", + [{"status_code": 200}], + ) - stream = Campaigns(api=api, account_ids=[account_id], start_date=pendulum.now(), end_date=pendulum.now(), include_deleted=False) + stream = Campaigns( + api=api, + account_ids=[account_id], + start_date=pendulum.now(), + end_date=pendulum.now(), + ) try: - records = list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, 
stream_slice={"account_id": account_id})) + records = list( + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) + ) assert records except FacebookRequestError: pytest.fail("Call rate error has not being handled") - def test_batch_limit_reached(self, requests_mock, api, fb_call_rate_response, account_id): + def test_batch_limit_reached( + self, requests_mock, api, fb_call_rate_response, account_id + ): """Error once, check that we retry and not fail""" responses = [ fb_call_rate_response, @@ -101,22 +140,56 @@ def test_batch_limit_reached(self, requests_mock, api, fb_call_rate_response, ac fb_call_rate_response, { "json": [ - {"body": json.dumps({"name": "creative 1"}), "code": 200, "headers": {}}, - {"body": json.dumps({"name": "creative 2"}), "code": 200, "headers": {}}, + { + "body": json.dumps({"name": "creative 1"}), + "code": 200, + "headers": {}, + }, + { + "body": json.dumps({"name": "creative 2"}), + "code": 200, + "headers": {}, + }, ] }, ] - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/adcreatives", responses) - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/", responses) - requests_mock.register_uri("POST", FacebookSession.GRAPH + f"/{FB_API_VERSION}/", batch_responses) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/adcreatives", + responses, + ) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/", + responses, + ) + requests_mock.register_uri( + "POST", FacebookSession.GRAPH + f"/{FB_API_VERSION}/", batch_responses + ) - stream = AdCreatives(api=api, account_ids=[account_id], include_deleted=False) - records = list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, stream_slice={"account_id": account_id})) + stream = AdCreatives(api=api, account_ids=[account_id]) + records = list( + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) + ) assert records == [ - {"account_id": "unknown_account", "id": "123", "object_type": "SHARE", "status": "ACTIVE"}, - {"account_id": "unknown_account", "id": "1234", "object_type": "SHARE", "status": "ACTIVE"}, + { + "account_id": "unknown_account", + "id": "123", + "object_type": "SHARE", + "status": "ACTIVE", + }, + { + "account_id": "unknown_account", + "id": "1234", + "object_type": "SHARE", + "status": "ACTIVE", + }, ] @pytest.mark.parametrize( @@ -130,7 +203,12 @@ def test_batch_limit_reached(self, requests_mock, api, fb_call_rate_response, ac ) def test_common_error_retry(self, error_response, requests_mock, api, account_id): """Error once, check that we retry and not fail""" - account_data = {"account_id": "unknown_account", "id": 1, "updated_time": "2020-09-25T00:00:00Z", "name": "Some name"} + account_data = { + "account_id": "unknown_account", + "id": 1, + "updated_time": "2020-09-25T00:00:00Z", + "name": "Some name", + } responses = [ error_response, { @@ -139,29 +217,67 @@ def test_common_error_retry(self, error_response, requests_mock, api, account_id }, ] - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/me/business_users", json={"data": []}) - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/", responses) - requests_mock.register_uri("GET", FacebookSession.GRAPH + 
f"/{FB_API_VERSION}/{account_data['id']}/", responses) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/me/business_users", + json={"data": []}, + ) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/", + responses, + ) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/{account_data['id']}/", + responses, + ) stream = AdAccount(api=api, account_ids=[account_id]) - accounts = list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, stream_slice={"account_id": account_id})) + accounts = list( + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) + ) assert accounts == [account_data] - def test_limit_error_retry(self, fb_call_amount_data_response, requests_mock, api, account_id): + def test_limit_error_retry( + self, fb_call_amount_data_response, requests_mock, api, account_id + ): """Error every time, check limit parameter decreases by 2 times every new call""" res = requests_mock.register_uri( - "GET", FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/campaigns", [fb_call_amount_data_response] + "GET", + FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/campaigns", + [fb_call_amount_data_response], ) stream = Campaigns( - api=api, account_ids=[account_id], start_date=pendulum.now(), end_date=pendulum.now(), include_deleted=False, page_size=100 + api=api, + account_ids=[account_id], + start_date=pendulum.now(), + end_date=pendulum.now(), + page_size=100, ) try: - list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, stream_slice={"account_id": account_id})) + list( + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) + ) except AirbyteTracedException: - assert [x.qs.get("limit")[0] for x in res.request_history] == ["100", "50", "25", "12", "6"] + assert [x.qs.get("limit")[0] for x in res.request_history] == [ + "100", + "50", + "25", + "12", + "6", + ] def test_limit_error_retry_revert_page_size(self, requests_mock, api, account_id): """Error every time, check limit parameter decreases by 2 times every new call""" @@ -195,12 +311,27 @@ def test_limit_error_retry_revert_page_size(self, requests_mock, api, account_id ) stream = Activities( - api=api, account_ids=[account_id], start_date=pendulum.now(), end_date=pendulum.now(), include_deleted=False, page_size=100 + api=api, + account_ids=[account_id], + start_date=pendulum.now(), + end_date=pendulum.now(), + page_size=100, ) try: - list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, stream_slice={"account_id": account_id})) + list( + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) + ) except FacebookRequestError: - assert [x.qs.get("limit")[0] for x in res.request_history] == ["100", "50", "100", "50"] + assert [x.qs.get("limit")[0] for x in res.request_history] == [ + "100", + "50", + "100", + "50", + ] def test_start_date_not_provided(self, requests_mock, api, account_id): success = { @@ -222,12 +353,28 @@ def test_start_date_not_provided(self, requests_mock, api, account_id): [success], ) - stream = Activities(api=api, account_ids=[account_id], start_date=None, end_date=None, include_deleted=False, page_size=100) - list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, stream_slice={"account_id": 
account_id})) + stream = Activities( + api=api, + account_ids=[account_id], + start_date=None, + end_date=None, + page_size=100, + ) + list( + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) + ) - def test_limit_error_retry_next_page(self, fb_call_amount_data_response, requests_mock, api, account_id): + def test_limit_error_retry_next_page( + self, fb_call_amount_data_response, requests_mock, api, account_id + ): """Unlike the previous test, this one tests the API call fail on the second or more page of a request.""" - base_url = FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/advideos" + base_url = ( + FacebookSession.GRAPH + f"/{FB_API_VERSION}/act_{account_id}/advideos" + ) res = requests_mock.register_uri( "GET", @@ -235,7 +382,10 @@ def test_limit_error_retry_next_page(self, fb_call_amount_data_response, request [ { "json": { - "data": [{"id": 1, "updated_time": "2020-09-25T00:00:00Z"}, {"id": 2, "updated_time": "2020-09-25T00:00:00Z"}], + "data": [ + {"id": 1, "updated_time": "2020-09-25T00:00:00Z"}, + {"id": 2, "updated_time": "2020-09-25T00:00:00Z"}, + ], "paging": {"next": f"{base_url}?after=after_page_1&limit=100"}, }, "status_code": 200, @@ -245,9 +395,26 @@ def test_limit_error_retry_next_page(self, fb_call_amount_data_response, request ) stream = Videos( - api=api, account_ids=[account_id], start_date=pendulum.now(), end_date=pendulum.now(), include_deleted=False, page_size=100 + api=api, + account_ids=[account_id], + start_date=pendulum.now(), + end_date=pendulum.now(), + page_size=100, ) try: - list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, stream_slice={"account_id": account_id})) + list( + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) + ) except AirbyteTracedException: - assert [x.qs.get("limit")[0] for x in res.request_history] == ["100", "100", "50", "25", "12", "6"] + assert [x.qs.get("limit")[0] for x in res.request_history] == [ + "100", + "100", + "50", + "25", + "12", + "6", + ] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_config_migrations.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_config_migrations.py index 092b855396c1..d72b4ce6c3e2 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_config_migrations.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_config_migrations.py @@ -6,82 +6,144 @@ import json from typing import Any, Mapping +import pytest from airbyte_cdk.models import OrchestratorType, Type from airbyte_cdk.sources import Source -from source_facebook_marketing.config_migrations import MigrateAccountIdToArray +from source_facebook_marketing.config_migrations import MigrateAccountIdToArray, MigrateIncludeDeletedToStatusFilters from source_facebook_marketing.source import SourceFacebookMarketing # BASE ARGS CMD = "check" -TEST_CONFIG_PATH = "unit_tests/test_migrations/test_old_config.json" -NEW_TEST_CONFIG_PATH = "unit_tests/test_migrations/test_new_config.json" -UPGRADED_TEST_CONFIG_PATH = "unit_tests/test_migrations/test_upgraded_config.json" -SOURCE_INPUT_ARGS = [CMD, "--config", TEST_CONFIG_PATH] SOURCE: Source = SourceFacebookMarketing() # HELPERS -def load_config(config_path: str = TEST_CONFIG_PATH) -> Mapping[str, Any]: +def load_config(config_path: str) -> Mapping[str, Any]: with open(config_path, "r") as config: 
return json.load(config) -def revert_migration(config_path: str = TEST_CONFIG_PATH) -> None: - with open(config_path, "r") as test_config: - config = json.load(test_config) - config.pop("account_ids") - with open(config_path, "w") as updated_config: - config = json.dumps(config) - updated_config.write(config) - - -def test_migrate_config(): - migration_instance = MigrateAccountIdToArray() - original_config = load_config() - # migrate the test_config - migration_instance.migrate(SOURCE_INPUT_ARGS, SOURCE) - # load the updated config - test_migrated_config = load_config() - # check migrated property - assert "account_ids" in test_migrated_config - assert isinstance(test_migrated_config["account_ids"], list) - # check the old property is in place - assert "account_id" in test_migrated_config - assert isinstance(test_migrated_config["account_id"], str) - # check the migration should be skipped, once already done - assert not migration_instance.should_migrate(test_migrated_config) - # load the old custom reports VS migrated - assert [original_config["account_id"]] == test_migrated_config["account_ids"] - # test CONTROL MESSAGE was emitted - control_msg = migration_instance.message_repository._message_queue[0] - assert control_msg.type == Type.CONTROL - assert control_msg.control.type == OrchestratorType.CONNECTOR_CONFIG - # old custom_reports are stil type(str) - assert isinstance(control_msg.control.connectorConfig.config["account_id"], str) - # new custom_reports are type(list) - assert isinstance(control_msg.control.connectorConfig.config["account_ids"], list) - # check the migrated values - assert control_msg.control.connectorConfig.config["account_ids"] == ["01234567890"] - # revert the test_config to the starting point - revert_migration() - - -def test_config_is_reverted(): - # check the test_config state, it has to be the same as before tests - test_config = load_config() - # check the config no longer has the migarted property - assert "account_ids" not in test_config - # check the old property is still there - assert "account_id" in test_config - assert isinstance(test_config["account_id"], str) - - -def test_should_not_migrate_new_config(): - new_config = load_config(NEW_TEST_CONFIG_PATH) - migration_instance = MigrateAccountIdToArray() - assert not migration_instance.should_migrate(new_config) - -def test_should_not_migrate_upgraded_config(): - new_config = load_config(UPGRADED_TEST_CONFIG_PATH) - migration_instance = MigrateAccountIdToArray() - assert not migration_instance.should_migrate(new_config) +class TestMigrateAccountIdToArray: + TEST_CONFIG_PATH = "unit_tests/test_migrations/account_id_to_array/test_old_config.json" + NEW_TEST_CONFIG_PATH = "unit_tests/test_migrations/account_id_to_array/test_new_config.json" + UPGRADED_TEST_CONFIG_PATH = "unit_tests/test_migrations/account_id_to_array/test_upgraded_config.json" + + @staticmethod + def revert_migration(config_path: str = TEST_CONFIG_PATH) -> None: + with open(config_path, "r") as test_config: + config = json.load(test_config) + config.pop("account_ids") + with open(config_path, "w") as updated_config: + config = json.dumps(config) + updated_config.write(config) + + def test_migrate_config(self): + migration_instance = MigrateAccountIdToArray() + original_config = load_config(self.TEST_CONFIG_PATH) + # migrate the test_config + migration_instance.migrate([CMD, "--config", self.TEST_CONFIG_PATH], SOURCE) + # load the updated config + test_migrated_config = load_config(self.TEST_CONFIG_PATH) + # check migrated property + 
assert "account_ids" in test_migrated_config + assert isinstance(test_migrated_config["account_ids"], list) + # check the old property is in place + assert "account_id" in test_migrated_config + assert isinstance(test_migrated_config["account_id"], str) + # check the migration should be skipped, once already done + assert not migration_instance.should_migrate(test_migrated_config) + # load the old custom reports VS migrated + assert [original_config["account_id"]] == test_migrated_config["account_ids"] + # test CONTROL MESSAGE was emitted + control_msg = migration_instance.message_repository._message_queue[0] + assert control_msg.type == Type.CONTROL + assert control_msg.control.type == OrchestratorType.CONNECTOR_CONFIG + # old custom_reports are stil type(str) + assert isinstance(control_msg.control.connectorConfig.config["account_id"], str) + # new custom_reports are type(list) + assert isinstance(control_msg.control.connectorConfig.config["account_ids"], list) + # check the migrated values + assert control_msg.control.connectorConfig.config["account_ids"] == ["01234567890"] + # revert the test_config to the starting point + self.revert_migration() + + def test_config_is_reverted(self): + # check the test_config state, it has to be the same as before tests + test_config = load_config(self.TEST_CONFIG_PATH) + # check the config no longer has the migarted property + assert "account_ids" not in test_config + # check the old property is still there + assert "account_id" in test_config + assert isinstance(test_config["account_id"], str) + + def test_should_not_migrate_new_config(self): + new_config = load_config(self.NEW_TEST_CONFIG_PATH) + migration_instance = MigrateAccountIdToArray() + assert not migration_instance.should_migrate(new_config) + + def test_should_not_migrate_upgraded_config(self): + new_config = load_config(self.UPGRADED_TEST_CONFIG_PATH) + migration_instance = MigrateAccountIdToArray() + assert not migration_instance.should_migrate(new_config) + + +class TestMigrateIncludeDeletedToStatusFilters: + OLD_TEST1_CONFIG_PATH = "unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_false/test_old_config.json" + NEW_TEST1_CONFIG_PATH = "unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_false/test_new_config.json" + OLD_TEST2_CONFIG_PATH = "unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_true/test_old_config.json" + NEW_TEST2_CONFIG_PATH = "unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_true/test_new_config.json" + + UPGRADED_TEST_CONFIG_PATH = "unit_tests/test_migrations/account_id_to_array/test_upgraded_config.json" + + filter_properties = ["ad_statuses", "adset_statuses", "campaign_statuses"] + + def revert_migration(self, config_path: str) -> None: + with open(config_path, "r") as test_config: + config = json.load(test_config) + for filter in self.filter_properties: + config.pop(filter) + with open(config_path, "w") as updated_config: + config = json.dumps(config) + updated_config.write(config) + + @pytest.mark.parametrize( + "old_config_path, new_config_path, include_deleted", + [(OLD_TEST1_CONFIG_PATH, NEW_TEST1_CONFIG_PATH, False), (OLD_TEST2_CONFIG_PATH, NEW_TEST2_CONFIG_PATH, True)], + ) + def test_migrate_config(self, old_config_path, new_config_path, include_deleted): + migration_instance = MigrateIncludeDeletedToStatusFilters() + original_config = load_config(old_config_path) + # migrate the test_config + migration_instance.migrate([CMD, "--config", 
old_config_path], SOURCE) + # load the updated config + test_migrated_config = load_config(old_config_path) + # load expected updated config + expected_new_config = load_config(new_config_path) + # compare expected with migrated + assert expected_new_config == test_migrated_config + # check migrated property + if include_deleted: + assert all([filter in test_migrated_config for filter in self.filter_properties]) + # check the old property is in place + assert "include_deleted" in test_migrated_config + assert test_migrated_config["include_deleted"] == include_deleted + # check the migration should be skipped, once already done + assert not migration_instance.should_migrate(test_migrated_config) + if include_deleted: + # test CONTROL MESSAGE was emitted + control_msg = migration_instance.message_repository._message_queue[0] + assert control_msg.type == Type.CONTROL + assert control_msg.control.type == OrchestratorType.CONNECTOR_CONFIG + # revert the test_config to the starting point + self.revert_migration(old_config_path) + + @pytest.mark.parametrize("new_config_path", [NEW_TEST1_CONFIG_PATH, NEW_TEST2_CONFIG_PATH]) + def test_should_not_migrate_new_config(self, new_config_path): + new_config = load_config(new_config_path) + migration_instance = MigrateIncludeDeletedToStatusFilters() + assert not migration_instance.should_migrate(new_config) + + def test_should_not_migrate_upgraded_config(self): + new_config = load_config(self.UPGRADED_TEST_CONFIG_PATH) + migration_instance = MigrateIncludeDeletedToStatusFilters() + assert not migration_instance.should_migrate(new_config) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_deep_merge.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_deep_merge.py index ca0b43393fef..75214acff1ab 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_deep_merge.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_deep_merge.py @@ -17,7 +17,10 @@ def test_return_new_object(): "key_2": [1, 2], } right = {"key_1": {"two": "right_value", "three": [1, 2, 3]}, "key_2": [3]} - expected_result = {"key_1": {"one": {"a", "b"}, "two": "right_value", "three": [1, 2, 3]}, "key_2": [1, 2, 3]} + expected_result = { + "key_1": {"one": {"a", "b"}, "two": "right_value", "three": [1, 2, 3]}, + "key_2": [1, 2, 3], + } result = deep_merge(deepcopy(left), deepcopy(right)) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py index 105306b25f55..46bbbbccd0d5 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_errors.py @@ -15,7 +15,11 @@ FB_API_VERSION = FacebookAdsApi.API_VERSION account_id = "unknown_account" -some_config = {"start_date": "2021-01-23T00:00:00Z", "account_ids": [account_id], "access_token": "unknown_token"} +some_config = { + "start_date": "2021-01-23T00:00:00Z", + "account_ids": [account_id], + "access_token": "unknown_token", +} base_url = f"{FacebookSession.GRAPH}/{FB_API_VERSION}/" act_url = f"{base_url}act_{account_id}/" @@ -26,8 +30,18 @@ } } ad_creative_data = [ - {"account_id": account_id, "id": "111111", "name": "ad creative 1", "updated_time": "2023-03-21T22:33:56-0700"}, - {"account_id": account_id, "id": "222222", "name": "ad creative 2", "updated_time": 
"2023-03-22T22:33:56-0700"}, + { + "account_id": account_id, + "id": "111111", + "name": "ad creative 1", + "updated_time": "2023-03-21T22:33:56-0700", + }, + { + "account_id": account_id, + "id": "222222", + "name": "ad creative 2", + "updated_time": "2023-03-22T22:33:56-0700", + }, ] ad_creative_response = { "json": { @@ -239,13 +253,23 @@ class TestRealErrors: }, }, "status_code": 400, - "headers": {"x-app-usage": json.dumps({"call_count": 28, "total_time": 25, "total_cputime": 25})}, + "headers": { + "x-app-usage": json.dumps( + {"call_count": 28, "total_time": 25, "total_cputime": 25} + ) + }, }, ), ( "error_500_unknown", { - "json": {"error": {"code": 1, "message": "An unknown error occurred", "error_subcode": 99}}, + "json": { + "error": { + "code": 1, + "message": "An unknown error occurred", + "error_subcode": 99, + } + }, "status_code": 500, }, ), @@ -282,33 +306,52 @@ class TestRealErrors: ), ], ) - def test_retryable_error(self, some_config, requests_mock, name, retryable_error_response): + def test_retryable_error( + self, some_config, requests_mock, name, retryable_error_response + ): """Error once, check that we retry and not fail""" requests_mock.reset_mock() - requests_mock.register_uri("GET", f"{act_url}", [retryable_error_response, ad_account_response]) - requests_mock.register_uri("GET", f"{act_url}adcreatives", [retryable_error_response, ad_creative_response]) + requests_mock.register_uri( + "GET", f"{act_url}", [retryable_error_response, ad_account_response] + ) + requests_mock.register_uri( + "GET", + f"{act_url}adcreatives", + [retryable_error_response, ad_creative_response], + ) api = API(access_token=some_config["access_token"], page_size=100) - stream = AdCreatives(api=api, account_ids=some_config["account_ids"], include_deleted=False) + stream = AdCreatives(api=api, account_ids=some_config["account_ids"]) ad_creative_records = list( - stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, stream_slice={"account_id": account_id}) + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) ) assert ad_creative_records == ad_creative_data - # requests_mock.register_uri("GET", f"{self.act_url}advideos", [error_400_service_temporarily_unavailable, ad_creative_response]) - # stream = Videos(api=api, start_date=pendulum.now(), end_date=pendulum.now(), include_deleted=False, page_size=100) - @pytest.mark.parametrize("name, friendly_msg, config_error_response", CONFIG_ERRORS) - def test_config_error_during_account_info_read(self, requests_mock, name, friendly_msg, config_error_response): + def test_config_error_during_account_info_read( + self, requests_mock, name, friendly_msg, config_error_response + ): """Error raised during account info read""" api = API(access_token=some_config["access_token"], page_size=100) - stream = AdCreatives(api=api, account_ids=some_config["account_ids"], include_deleted=False) + stream = AdCreatives(api=api, account_ids=some_config["account_ids"]) - requests_mock.register_uri("GET", f"{act_url}", [config_error_response, ad_account_response]) + requests_mock.register_uri( + "GET", f"{act_url}", [config_error_response, ad_account_response] + ) try: - list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, stream_slice={"account_id": account_id})) + list( + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) + ) assert False except Exception as error: assert isinstance(error, 
AirbyteTracedException) @@ -317,16 +360,28 @@ def test_config_error_during_account_info_read(self, requests_mock, name, friend # @pytest.mark.parametrize("name, friendly_msg, config_error_response", [CONFIG_ERRORS[-1]]) @pytest.mark.parametrize("name, friendly_msg, config_error_response", CONFIG_ERRORS) - def test_config_error_during_actual_nodes_read(self, requests_mock, name, friendly_msg, config_error_response): + def test_config_error_during_actual_nodes_read( + self, requests_mock, name, friendly_msg, config_error_response + ): """Error raised during actual nodes read""" api = API(access_token=some_config["access_token"], page_size=100) - stream = AdCreatives(api=api, account_ids=some_config["account_ids"], include_deleted=False) + stream = AdCreatives(api=api, account_ids=some_config["account_ids"]) requests_mock.register_uri("GET", f"{act_url}", [ad_account_response]) - requests_mock.register_uri("GET", f"{act_url}adcreatives", [config_error_response, ad_creative_response]) + requests_mock.register_uri( + "GET", + f"{act_url}adcreatives", + [config_error_response, ad_creative_response], + ) try: - list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_state={}, stream_slice={"account_id": account_id})) + list( + stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_state={}, + stream_slice={"account_id": account_id}, + ) + ) assert False except Exception as error: assert isinstance(error, AirbyteTracedException) @@ -334,7 +389,9 @@ def test_config_error_during_actual_nodes_read(self, requests_mock, name, friend assert friendly_msg in error.message @pytest.mark.parametrize("name, friendly_msg, config_error_response", CONFIG_ERRORS) - def test_config_error_insights_account_info_read(self, requests_mock, name, friendly_msg, config_error_response): + def test_config_error_insights_account_info_read( + self, requests_mock, name, friendly_msg, config_error_response + ): """Error raised during actual nodes read""" api = API(access_token=some_config["access_token"], page_size=100) @@ -346,18 +403,30 @@ def test_config_error_insights_account_info_read(self, requests_mock, name, frie fields=["account_id", "account_currency"], insights_lookback_window=28, ) - requests_mock.register_uri("GET", f"{act_url}", [config_error_response, ad_account_response]) + requests_mock.register_uri( + "GET", f"{act_url}", [config_error_response, ad_account_response] + ) try: - slice = list(stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={}))[0] - list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice, stream_state={})) + slice = list( + stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={}) + )[0] + list( + stream.read_records( + sync_mode=SyncMode.full_refresh, stream_slice=slice, stream_state={} + ) + ) assert False except Exception as error: assert isinstance(error, AirbyteTracedException) assert error.failure_type == FailureType.config_error assert friendly_msg in error.message - @pytest.mark.parametrize("name, friendly_msg, config_error_response", [CONFIG_ERRORS[0]]) - def test_config_error_insights_during_actual_nodes_read(self, requests_mock, name, friendly_msg, config_error_response): + @pytest.mark.parametrize( + "name, friendly_msg, config_error_response", [CONFIG_ERRORS[0]] + ) + def test_config_error_insights_during_actual_nodes_read( + self, requests_mock, name, friendly_msg, config_error_response + ): """Error raised during actual nodes read""" api = API(access_token=some_config["access_token"], page_size=100) @@ -370,11 
+439,19 @@ def test_config_error_insights_during_actual_nodes_read(self, requests_mock, nam insights_lookback_window=28, ) requests_mock.register_uri("GET", f"{act_url}", [ad_account_response]) - requests_mock.register_uri("GET", f"{act_url}insights", [config_error_response, ad_creative_response]) + requests_mock.register_uri( + "GET", f"{act_url}insights", [config_error_response, ad_creative_response] + ) try: - slice = list(stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={}))[0] - list(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=slice, stream_state={})) + slice = list( + stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state={}) + )[0] + list( + stream.read_records( + sync_mode=SyncMode.full_refresh, stream_slice=slice, stream_state={} + ) + ) assert False except Exception as error: assert isinstance(error, AirbyteTracedException) @@ -421,14 +498,29 @@ def test_adaccount_list_objects_retry(self, requests_mock, failure_response): account_ids=some_config["account_ids"], ) - business_user = {"account_id": account_id, "business": {"id": "1", "name": "TEST"}} - requests_mock.register_uri("GET", f"{base_url}me/business_users", status_code=200, json=business_user) + business_user = { + "account_id": account_id, + "business": {"id": "1", "name": "TEST"}, + } + requests_mock.register_uri( + "GET", f"{base_url}me/business_users", status_code=200, json=business_user + ) assigend_users = {"account_id": account_id, "tasks": ["TASK"]} - requests_mock.register_uri("GET", f"{act_url}assigned_users", status_code=200, json=assigend_users) + requests_mock.register_uri( + "GET", f"{act_url}assigned_users", status_code=200, json=assigend_users + ) success_response = {"status_code": 200, "json": {"account_id": account_id}} - requests_mock.register_uri("GET", f"{act_url}", [failure_response, success_response]) + requests_mock.register_uri( + "GET", f"{act_url}", [failure_response, success_response] + ) - record_gen = stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"account_id": account_id}, stream_state={}) - assert list(record_gen) == [{"account_id": "unknown_account", "id": "act_unknown_account"}] + record_gen = stream.read_records( + sync_mode=SyncMode.full_refresh, + stream_slice={"account_id": account_id}, + stream_state={}, + ) + assert list(record_gen) == [ + {"account_id": "unknown_account", "id": "act_unknown_account"} + ] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/test_new_config.json b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/account_id_to_array/test_new_config.json similarity index 100% rename from airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/test_new_config.json rename to airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/account_id_to_array/test_new_config.json diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/test_old_config.json b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/account_id_to_array/test_old_config.json similarity index 100% rename from airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/test_old_config.json rename to airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/account_id_to_array/test_old_config.json diff --git 
a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/test_upgraded_config.json b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/account_id_to_array/test_upgraded_config.json similarity index 100% rename from airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/test_upgraded_config.json rename to airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/account_id_to_array/test_upgraded_config.json diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_false/test_new_config.json b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_false/test_new_config.json new file mode 100644 index 000000000000..d054e1bae501 --- /dev/null +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_false/test_new_config.json @@ -0,0 +1,15 @@ +{ + "start_date": "2021-02-08T00:00:00Z", + "end_date": "2021-02-15T00:00:00Z", + "custom_insights": [ + { + "name": "custom_insight_stream", + "fields": ["account_name", "clicks", "cpc", "account_id", "ad_id"], + "breakdowns": ["gender"], + "action_breakdowns": [] + } + ], + "account_ids": ["01234567890"], + "access_token": "access_token", + "include_deleted": false +} diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_false/test_old_config.json b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_false/test_old_config.json new file mode 100644 index 000000000000..72dcc27afbdf --- /dev/null +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_false/test_old_config.json @@ -0,0 +1,15 @@ +{ + "start_date": "2021-02-08T00:00:00Z", + "end_date": "2021-02-15T00:00:00Z", + "custom_insights": [ + { + "name": "custom_insight_stream", + "fields": ["account_name", "clicks", "cpc", "account_id", "ad_id"], + "breakdowns": ["gender"], + "action_breakdowns": [] + } + ], + "include_deleted": false, + "account_ids": ["01234567890"], + "access_token": "access_token" +} diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_true/test_new_config.json b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_true/test_new_config.json new file mode 100644 index 000000000000..e579fa634de8 --- /dev/null +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_true/test_new_config.json @@ -0,0 +1,46 @@ +{ + "start_date": "2021-02-08T00:00:00Z", + "end_date": "2021-02-15T00:00:00Z", + "custom_insights": [ + { + "name": "custom_insight_stream", + "fields": ["account_name", "clicks", "cpc", "account_id", "ad_id"], + "breakdowns": ["gender"], + "action_breakdowns": [] + } + ], + "include_deleted": true, + "account_ids": ["01234567890"], + "access_token": "access_token", + "ad_statuses": [ + "ACTIVE", + "ADSET_PAUSED", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "DISAPPROVED", + "IN_PROCESS", 
+ "PAUSED", + "PENDING_BILLING_INFO", + "PENDING_REVIEW", + "PREAPPROVED", + "WITH_ISSUES" + ], + "adset_statuses": [ + "ACTIVE", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES" + ], + "campaign_statuses": [ + "ACTIVE", + "ARCHIVED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES" + ] +} diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_true/test_old_config.json b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_true/test_old_config.json new file mode 100644 index 000000000000..0cf00a31758d --- /dev/null +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/include_deleted_true/test_old_config.json @@ -0,0 +1,15 @@ +{ + "start_date": "2021-02-08T00:00:00Z", + "end_date": "2021-02-15T00:00:00Z", + "custom_insights": [ + { + "name": "custom_insight_stream", + "fields": ["account_name", "clicks", "cpc", "account_id", "ad_id"], + "breakdowns": ["gender"], + "action_breakdowns": [] + } + ], + "include_deleted": true, + "account_ids": ["01234567890"], + "access_token": "access_token" +} diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/test_upgraded_config.json b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/test_upgraded_config.json new file mode 100644 index 000000000000..e579fa634de8 --- /dev/null +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_migrations/include_deleted_to_status_filters/test_upgraded_config.json @@ -0,0 +1,46 @@ +{ + "start_date": "2021-02-08T00:00:00Z", + "end_date": "2021-02-15T00:00:00Z", + "custom_insights": [ + { + "name": "custom_insight_stream", + "fields": ["account_name", "clicks", "cpc", "account_id", "ad_id"], + "breakdowns": ["gender"], + "action_breakdowns": [] + } + ], + "include_deleted": true, + "account_ids": ["01234567890"], + "access_token": "access_token", + "ad_statuses": [ + "ACTIVE", + "ADSET_PAUSED", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "DISAPPROVED", + "IN_PROCESS", + "PAUSED", + "PENDING_BILLING_INFO", + "PENDING_REVIEW", + "PREAPPROVED", + "WITH_ISSUES" + ], + "adset_statuses": [ + "ACTIVE", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES" + ], + "campaign_statuses": [ + "ACTIVE", + "ARCHIVED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES" + ] +} diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_source.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_source.py index 7b96c5ced3b9..2ca1e4e6a822 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_source.py @@ -32,8 +32,16 @@ def config_fixture(requests_mock): "start_date": "2019-10-10T00:00:00Z", "end_date": "2020-10-10T00:00:00Z", } - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FacebookAdsApi.API_VERSION}/me/business_users", json={"data": []}) - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FacebookAdsApi.API_VERSION}/act_123/", json={"account": 123}) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + 
f"/{FacebookAdsApi.API_VERSION}/me/business_users", + json={"data": []}, + ) + requests_mock.register_uri( + "GET", + FacebookSession.GRAPH + f"/{FacebookAdsApi.API_VERSION}/act_123/", + json={"account": 123}, + ) return config @@ -79,7 +87,9 @@ def test_check_connection_ok(self, config, logger_mock, fb_marketing): assert ok assert not error_msg - def test_check_connection_find_account_was_called(self, api_find_account, config, logger_mock, fb_marketing): + def test_check_connection_find_account_was_called( + self, api_find_account, config, logger_mock, fb_marketing + ): """Check if _find_account was called to validate credentials""" ok, error_msg = fb_marketing.check_connection(logger_mock, config=config) @@ -93,7 +103,9 @@ def test_check_connection_find_account_was_called(self, api_find_account, config assert ok assert not error_msg - def test_check_connection_future_date_range(self, api, config, logger_mock, fb_marketing): + def test_check_connection_future_date_range( + self, api, config, logger_mock, fb_marketing + ): config["start_date"] = "2219-10-10T00:00:00" config["end_date"] = "2219-10-11T00:00:00" assert fb_marketing.check_connection(logger_mock, config=config) == ( @@ -101,7 +113,9 @@ def test_check_connection_future_date_range(self, api, config, logger_mock, fb_m "Date range can not be in the future.", ) - def test_check_connection_end_date_before_start_date(self, api, config, logger_mock, fb_marketing): + def test_check_connection_end_date_before_start_date( + self, api, config, logger_mock, fb_marketing + ): config["start_date"] = "2019-10-10T00:00:00" config["end_date"] = "2019-10-09T00:00:00" assert fb_marketing.check_connection(logger_mock, config=config) == ( @@ -116,7 +130,9 @@ def test_check_connection_empty_config(self, api, logger_mock, fb_marketing): assert not ok assert error_msg - def test_check_connection_config_no_start_date(self, api, config, logger_mock, fb_marketing): + def test_check_connection_config_no_start_date( + self, api, config, logger_mock, fb_marketing + ): config.pop("start_date") ok, error_msg = fb_marketing.check_connection(logger_mock, config=config) @@ -143,24 +159,44 @@ def test_spec(self, fb_marketing): def test_get_custom_insights_streams(self, api, config, fb_marketing): config["custom_insights"] = [ - {"name": "test", "fields": ["account_id"], "breakdowns": ["ad_format_asset"], "action_breakdowns": ["action_device"]}, + { + "name": "test", + "fields": ["account_id"], + "breakdowns": ["ad_format_asset"], + "action_breakdowns": ["action_device"], + }, ] config = ConnectorConfig.parse_obj(config) assert fb_marketing.get_custom_insights_streams(api, config) - def test_get_custom_insights_action_breakdowns_allow_empty(self, api, config, fb_marketing): + def test_get_custom_insights_action_breakdowns_allow_empty( + self, api, config, fb_marketing + ): config["custom_insights"] = [ - {"name": "test", "fields": ["account_id"], "breakdowns": ["ad_format_asset"], "action_breakdowns": []}, + { + "name": "test", + "fields": ["account_id"], + "breakdowns": ["ad_format_asset"], + "action_breakdowns": [], + }, ] config["action_breakdowns_allow_empty"] = False - streams = fb_marketing.get_custom_insights_streams(api, ConnectorConfig.parse_obj(config)) + streams = fb_marketing.get_custom_insights_streams( + api, ConnectorConfig.parse_obj(config) + ) assert len(streams) == 1 assert streams[0].breakdowns == ["ad_format_asset"] - assert streams[0].action_breakdowns == ["action_type", "action_target_id", "action_destination"] + assert 
streams[0].action_breakdowns == [ + "action_type", + "action_target_id", + "action_destination", + ] config["action_breakdowns_allow_empty"] = True - streams = fb_marketing.get_custom_insights_streams(api, ConnectorConfig.parse_obj(config)) + streams = fb_marketing.get_custom_insights_streams( + api, ConnectorConfig.parse_obj(config) + ) assert len(streams) == 1 assert streams[0].breakdowns == ["ad_format_asset"] assert streams[0].action_breakdowns == [] @@ -187,9 +223,13 @@ def test_read_missing_stream(self, config, api, logger_mock, fb_marketing): def test_check_config(config_gen, requests_mock, fb_marketing): - requests_mock.register_uri("GET", FacebookSession.GRAPH + f"/{FacebookAdsApi.API_VERSION}/act_123/", {}) + requests_mock.register_uri( + "GET", FacebookSession.GRAPH + f"/{FacebookAdsApi.API_VERSION}/act_123/", {} + ) - assert command_check(fb_marketing, config_gen()) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) + assert command_check(fb_marketing, config_gen()) == AirbyteConnectionStatus( + status=Status.SUCCEEDED, message=None + ) status = command_check(fb_marketing, config_gen(start_date="2019-99-10T00:00:00Z")) assert status.status == Status.FAILED @@ -200,5 +240,9 @@ def test_check_config(config_gen, requests_mock, fb_marketing): status = command_check(fb_marketing, config_gen(start_date=...)) assert status.status == Status.SUCCEEDED - assert command_check(fb_marketing, config_gen(end_date=...)) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) - assert command_check(fb_marketing, config_gen(end_date="")) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) + assert command_check( + fb_marketing, config_gen(end_date=...) + ) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) + assert command_check( + fb_marketing, config_gen(end_date="") + ) == AirbyteConnectionStatus(status=Status.SUCCEEDED, message=None) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py index a2b03c52e67c..d300af0571b7 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_streams.py @@ -7,6 +7,7 @@ from pendulum import duration from source_facebook_marketing.api import MyFacebookAdsApi from source_facebook_marketing.streams import ( + AdSets, AdsInsights, AdsInsightsActionType, AdsInsightsAgeAndGender, @@ -21,38 +22,63 @@ def test_filter_all_statuses(api, mocker, some_config): mocker.patch.multiple(FBMarketingStream, __abstractmethods__=set()) + expected = {} + assert ( + FBMarketingStream( + api=api, account_ids=some_config["account_ids"] + )._filter_all_statuses() + == expected + ) + expected = { "filtering": [ { - "field": "None.delivery_info", + "field": "adset.effective_status", "operator": "IN", "value": [ - "active", - "archived", - "completed", - "limited", - "not_delivering", - "deleted", - "not_published", - "pending_review", - "permanently_deleted", - "recently_completed", - "recently_rejected", - "rejected", - "scheduled", - "inactive", + "ACTIVE", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES", ], } ] } - assert FBMarketingStream(api=api, account_ids=some_config["account_ids"])._filter_all_statuses() == expected + assert ( + AdSets( + account_ids=some_config["account_ids"], + start_date="", + end_date="", + api=api, + 
filter_statuses=[ + "ACTIVE", + "ARCHIVED", + "CAMPAIGN_PAUSED", + "DELETED", + "IN_PROCESS", + "PAUSED", + "WITH_ISSUES", + ], + )._filter_all_statuses() + == expected + ) @pytest.mark.parametrize( - "url", ["https://graph.facebook.com", "https://graph.facebook.com?test=123%23%24%25%2A&test2=456", "https://graph.facebook.com?"] + "url", + [ + "https://graph.facebook.com", + "https://graph.facebook.com?test=123%23%24%25%2A&test2=456", + "https://graph.facebook.com?", + ], ) def test_fetch_thumbnail_data_url(url, requests_mock): - requests_mock.get(url, status_code=200, headers={"content-type": "content-type"}, content=b"") + requests_mock.get( + url, status_code=200, headers={"content-type": "content-type"}, content=b"" + ) assert fetch_thumbnail_data_url(url) == "data:content-type;base64," @@ -69,14 +95,36 @@ def test_parse_call_rate_header(): [ [AdsInsights, [], ["action_type", "action_target_id", "action_destination"]], [AdsInsightsActionType, [], ["action_type"]], - [AdsInsightsAgeAndGender, ["age", "gender"], ["action_type", "action_target_id", "action_destination"]], - [AdsInsightsCountry, ["country"], ["action_type", "action_target_id", "action_destination"]], - [AdsInsightsDma, ["dma"], ["action_type", "action_target_id", "action_destination"]], - [AdsInsightsPlatformAndDevice, ["publisher_platform", "platform_position", "impression_device"], ["action_type"]], - [AdsInsightsRegion, ["region"], ["action_type", "action_target_id", "action_destination"]], + [ + AdsInsightsAgeAndGender, + ["age", "gender"], + ["action_type", "action_target_id", "action_destination"], + ], + [ + AdsInsightsCountry, + ["country"], + ["action_type", "action_target_id", "action_destination"], + ], + [ + AdsInsightsDma, + ["dma"], + ["action_type", "action_target_id", "action_destination"], + ], + [ + AdsInsightsPlatformAndDevice, + ["publisher_platform", "platform_position", "impression_device"], + ["action_type"], + ], + [ + AdsInsightsRegion, + ["region"], + ["action_type", "action_target_id", "action_destination"], + ], ], ) -def test_ads_insights_breakdowns(class_name, breakdowns, action_breakdowns, some_config): +def test_ads_insights_breakdowns( + class_name, breakdowns, action_breakdowns, some_config +): kwargs = { "api": None, "account_ids": some_config["account_ids"], @@ -97,15 +145,26 @@ def test_custom_ads_insights_breakdowns(some_config): "end_date": pendulum.now(), "insights_lookback_window": 1, } - stream = AdsInsights(breakdowns=["mmm"], action_breakdowns=["action_destination"], **kwargs) + stream = AdsInsights( + breakdowns=["mmm"], action_breakdowns=["action_destination"], **kwargs + ) assert stream.breakdowns == ["mmm"] assert stream.action_breakdowns == ["action_destination"] stream = AdsInsights(breakdowns=[], action_breakdowns=[], **kwargs) assert stream.breakdowns == [] - assert stream.action_breakdowns == ["action_type", "action_target_id", "action_destination"] - - stream = AdsInsights(breakdowns=[], action_breakdowns=[], action_breakdowns_allow_empty=True, **kwargs) + assert stream.action_breakdowns == [ + "action_type", + "action_target_id", + "action_destination", + ] + + stream = AdsInsights( + breakdowns=[], + action_breakdowns=[], + action_breakdowns_allow_empty=True, + **kwargs + ) assert stream.breakdowns == [] assert stream.action_breakdowns == [] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_utils.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_utils.py index ccde2ee1fcba..652237fb8f91 100644 --- 
a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_utils.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/test_utils.py @@ -23,13 +23,18 @@ "start_date", pendulum.local(2019, 1, 1), pendulum.local(2020, 3, 2), - [f"The start date cannot be beyond 37 months from the current date. " f"Set start date to {pendulum.local(2020, 3, 2)}."], + [ + f"The start date cannot be beyond 37 months from the current date. " + f"Set start date to {pendulum.local(2020, 3, 2)}." + ], ), ( "start_date", TODAY + pendulum.duration(months=1), TODAY, - [f"The start date cannot be in the future. Set start date to today's date - {TODAY}."], + [ + f"The start date cannot be in the future. Set start date to today's date - {TODAY}." + ], ), ( "end_date", diff --git a/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/expected_spec.json b/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/expected_spec.json index e77e9d632780..bd7f8b04829e 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/expected_spec.json +++ b/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/expected_spec.json @@ -167,6 +167,15 @@ "minimum": 10, "maximum": 100000, "group": "advanced" + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 11, + "group": "advanced" } }, "groups": [ diff --git a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml index cde2b8488af3..6d9c000b4537 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml +++ b/airbyte-integrations/connectors/source-mongodb-v2/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: source definitionId: b2e713cd-cc36-4c0a-b5bd-b47cb8a0561e - dockerImageTag: 1.2.10 + dockerImageTag: 1.2.11 dockerRepository: airbyte/source-mongodb-v2 documentationUrl: https://docs.airbyte.com/integrations/sources/mongodb-v2 githubIssueLabel: source-mongodb-v2 diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoConstants.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoConstants.java index 430fa9f9c409..6fb2bc792b19 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoConstants.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoConstants.java @@ -37,6 +37,10 @@ public class MongoConstants { public static final String INITIAL_RECORD_WAITING_TIME_SEC = "initial_waiting_seconds"; public static final Integer DEFAULT_INITIAL_RECORD_WAITING_TIME_SEC = 300; + public static final String INVALID_CDC_CURSOR_POSITION_PROPERTY = "invalid_cdc_cursor_position_behavior"; + public static final String FAIL_SYNC_OPTION = "Fail sync"; + public static final String RESYNC_DATA_OPTION = "Re-sync data"; 
+ private MongoConstants() {} } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSourceConfig.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSourceConfig.java index a03647bb9386..8f3f572afad2 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSourceConfig.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/MongoDbSourceConfig.java @@ -14,7 +14,9 @@ import static io.airbyte.integrations.source.mongodb.MongoConstants.DEFAULT_INITIAL_RECORD_WAITING_TIME_SEC; import static io.airbyte.integrations.source.mongodb.MongoConstants.DISCOVER_SAMPLE_SIZE_CONFIGURATION_KEY; import static io.airbyte.integrations.source.mongodb.MongoConstants.INITIAL_RECORD_WAITING_TIME_SEC; +import static io.airbyte.integrations.source.mongodb.MongoConstants.INVALID_CDC_CURSOR_POSITION_PROPERTY; import static io.airbyte.integrations.source.mongodb.MongoConstants.PASSWORD_CONFIGURATION_KEY; +import static io.airbyte.integrations.source.mongodb.MongoConstants.RESYNC_DATA_OPTION; import static io.airbyte.integrations.source.mongodb.MongoConstants.SCHEMA_ENFORCED_CONFIGURATION_KEY; import static io.airbyte.integrations.source.mongodb.MongoConstants.USERNAME_CONFIGURATION_KEY; @@ -96,4 +98,13 @@ public Integer getInitialWaitingTimeSeconds() { } } + public boolean shouldFailSyncOnInvalidCursor() { + if (rawConfig.has(INVALID_CDC_CURSOR_POSITION_PROPERTY) + && rawConfig.get(INVALID_CDC_CURSOR_POSITION_PROPERTY).asText().equals(RESYNC_DATA_OPTION)) { + return false; + } else { + return true; + } + } + } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java index 8b73e6ab3f40..279eb053f3f2 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializer.java @@ -12,6 +12,7 @@ import com.mongodb.client.MongoDatabase; import io.airbyte.cdk.integrations.base.AirbyteTraceMessageUtility; import io.airbyte.cdk.integrations.debezium.AirbyteDebeziumHandler; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; @@ -116,6 +117,10 @@ public List> createCdcIterators( if (!savedOffsetIsValid) { AirbyteTraceMessageUtility.emitAnalyticsTrace(cdcCursorInvalidMessage()); + if (config.shouldFailSyncOnInvalidCursor()) { + throw new ConfigErrorException( + "Saved offset is not valid. Please reset the connection, and then increase oplog retention or reduce sync frequency to prevent his from happening in the future. See https://docs.airbyte.com/integrations/sources/mongodb-v2#mongodb-oplog-and-change-streams for more details"); + } LOGGER.info("Saved offset is not valid. 
Airbyte will trigger a full refresh."); // If the offset in the state is invalid, reset the state to the initial STATE stateManager.resetState(new MongoDbCdcState(initialDebeziumState, config.getEnforceSchema())); diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json index 9c4af4f046c9..07a7268b7158 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json @@ -167,6 +167,15 @@ "minimum": 10, "maximum": 100000, "group": "advanced" + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 11, + "group": "advanced" } }, "groups": [ diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java index fa220e4386db..5b7703b85c81 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongoDbSourceAcceptanceTest.java @@ -9,6 +9,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -508,14 +509,8 @@ void testSyncShouldHandlePurgedLogsGracefully() throws Exception { stateMessage.getGlobal().setSharedState(Jsons.jsonNode(cdcState)); final JsonNode state = Jsons.jsonNode(List.of(stateMessage)); - // Re-run the sync to prove that an initial snapshot is initiated due to invalid resume token - final List messages2 = runRead(configuredCatalog, state); - - final List recordMessages2 = filterRecords(messages2); - final List stateMessages2 = filterStateMessages(messages2); - - assertEquals(recordCount, recordMessages2.size()); - assertEquals(recordCount + 1, stateMessages2.size()); + // Re-run the sync to prove that a config error is thrown due to invalid resume token + assertThrows(Exception.class, () -> runRead(configuredCatalog, state)); } @Test diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java index 9b87de23c1a3..7e0ea6eaa222 100644 --- 
a/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test/java/io/airbyte/integrations/source/mongodb/cdc/MongoDbCdcInitializerTest.java @@ -5,6 +5,8 @@ package io.airbyte.integrations.source.mongodb.cdc; import static io.airbyte.integrations.source.mongodb.MongoConstants.DATABASE_CONFIG_CONFIGURATION_KEY; +import static io.airbyte.integrations.source.mongodb.MongoConstants.INVALID_CDC_CURSOR_POSITION_PROPERTY; +import static io.airbyte.integrations.source.mongodb.MongoConstants.RESYNC_DATA_OPTION; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -20,6 +22,7 @@ import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; import com.mongodb.MongoCommandException; import com.mongodb.ServerAddress; import com.mongodb.client.AggregateIterable; @@ -205,20 +208,53 @@ void testCreateCdcIteratorsFromInitialStateWithCompletedInitialSnapshot() { } @Test - void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetInvalid() { + void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetInvalidDefaultBehavior() { + when(changeStreamIterable.cursor()) + .thenReturn(mongoChangeStreamCursor) + .thenThrow(new MongoCommandException(new BsonDocument(), new ServerAddress())) + .thenReturn(mongoChangeStreamCursor); + final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE)); + assertThrows(ConfigErrorException.class, () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, + stateManager, EMITTED_AT, CONFIG)); + } + + @Test + void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetFailOption() { + when(changeStreamIterable.cursor()) + .thenReturn(mongoChangeStreamCursor) + .thenThrow(new MongoCommandException(new BsonDocument(), new ServerAddress())) + .thenReturn(mongoChangeStreamCursor); + final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE)); + assertThrows(ConfigErrorException.class, () -> cdcInitializer.createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, + stateManager, EMITTED_AT, CONFIG)); + } + + @Test + void testCreateCdcIteratorsWithCompletedInitialSnapshotSavedOffsetInvalidResyncOption() { + MongoDbSourceConfig resyncConfig = new MongoDbSourceConfig(createConfig(RESYNC_DATA_OPTION)); when(changeStreamIterable.cursor()) .thenReturn(mongoChangeStreamCursor) .thenThrow(new MongoCommandException(new BsonDocument(), new ServerAddress())) .thenReturn(mongoChangeStreamCursor); final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE)); final List> iterators = cdcInitializer - .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, CONFIG); + .createCdcIterators(mongoClient, cdcConnectorMetadataInjector, CONFIGURED_CATALOG, stateManager, EMITTED_AT, resyncConfig); assertNotNull(iterators); assertEquals(2, iterators.size(), "Should always have 2 iterators: 1 for the initial snapshot and 1 for the cdc stream"); assertTrue(iterators.get(0).hasNext(), "Initial 
snapshot iterator should at least have one message if its snapshot state is set as complete but needs to start over due to invalid saved offset"); } + JsonNode createConfig(String cdcCursorFailBehaviour) { + return Jsons.jsonNode(ImmutableMap.builder() + .put(DATABASE_CONFIG_CONFIGURATION_KEY, + Map.of( + MongoDbDebeziumConstants.Configuration.CONNECTION_STRING_CONFIGURATION_KEY, "mongodb://host:12345/", + MongoDbDebeziumConstants.Configuration.DATABASE_CONFIGURATION_KEY, DATABASE)) + .put(INVALID_CDC_CURSOR_POSITION_PROPERTY, cdcCursorFailBehaviour) + .build()); + } + @Test void testUnableToExtractOffsetFromStateException() { final MongoDbStateManager stateManager = MongoDbStateManager.createStateManager(createInitialDebeziumState(InitialSnapshotStatus.COMPLETE)); diff --git a/airbyte-integrations/connectors/source-mysql/metadata.yaml b/airbyte-integrations/connectors/source-mysql/metadata.yaml index 68a3b86807fe..f63c1375e894 100644 --- a/airbyte-integrations/connectors/source-mysql/metadata.yaml +++ b/airbyte-integrations/connectors/source-mysql/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad - dockerImageTag: 3.3.7 + dockerImageTag: 3.3.8 dockerRepository: airbyte/source-mysql documentationUrl: https://docs.airbyte.com/integrations/sources/mysql githubIssueLabel: source-mysql diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSpecConstants.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSpecConstants.java new file mode 100644 index 000000000000..7735470482da --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSpecConstants.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.mysql; + +// Constants defined in +// airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json. 
+public class MySqlSpecConstants { + + public static final String INVALID_CDC_CURSOR_POSITION_PROPERTY = "invalid_cdc_cursor_position_behavior"; + public static final String FAIL_SYNC_OPTION = "Fail sync"; + public static final String RESYNC_DATA_OPTION = "Re-sync data"; + +} diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java index ff0b7a477e5c..47aa83ee09fc 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/initialsync/MySqlInitialReadUtil.java @@ -7,6 +7,8 @@ import static io.airbyte.cdk.db.DbAnalyticsUtils.cdcCursorInvalidMessage; import static io.airbyte.integrations.source.mysql.MySqlQueryUtils.getTableSizeInfoForStreams; import static io.airbyte.integrations.source.mysql.MySqlQueryUtils.prettyPrintConfiguredAirbyteStreamList; +import static io.airbyte.integrations.source.mysql.MySqlSpecConstants.FAIL_SYNC_OPTION; +import static io.airbyte.integrations.source.mysql.MySqlSpecConstants.INVALID_CDC_CURSOR_POSITION_PROPERTY; import static io.airbyte.integrations.source.mysql.cdc.MysqlCdcStateConstants.MYSQL_CDC_OFFSET; import static io.airbyte.integrations.source.mysql.initialsync.MySqlInitialLoadGlobalStateManager.STATE_TYPE_KEY; import static io.airbyte.integrations.source.mysql.initialsync.MySqlInitialLoadStateManager.PRIMARY_KEY_STATE_TYPE; @@ -25,6 +27,7 @@ import io.airbyte.cdk.integrations.source.relationaldb.TableInfo; import io.airbyte.cdk.integrations.source.relationaldb.models.CdcState; import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; @@ -111,6 +114,11 @@ public static List> getCdcReadIterators(fi if (!savedOffsetStillPresentOnServer) { AirbyteTraceMessageUtility.emitAnalyticsTrace(cdcCursorInvalidMessage()); + if (!sourceConfig.get("replication_method").has(INVALID_CDC_CURSOR_POSITION_PROPERTY) || sourceConfig.get("replication_method").get( + INVALID_CDC_CURSOR_POSITION_PROPERTY).asText().equals(FAIL_SYNC_OPTION)) { + throw new ConfigErrorException( + "Saved offset no longer present on the server. Please reset the connection, and then increase binlog retention or reduce sync frequency. See https://docs.airbyte.com/integrations/sources/mysql/mysql-troubleshooting#under-cdc-incremental-mode-there-are-still-full-refresh-syncs for more details."); + } LOGGER.warn("Saved offset no longer present on the server, Airbyte is going to trigger a sync from scratch"); } diff --git a/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json index 841fa1f3bdba..78450b13aabd 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json @@ -211,6 +211,15 @@ "description": "Enter the configured MySQL server timezone. 
This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.", "order": 2, "always_show": true + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 3, + "always_show": true } } }, diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_cloud_spec.json b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_cloud_spec.json index 50d717a95886..871b7c0c38bb 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_cloud_spec.json +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_cloud_spec.json @@ -189,6 +189,15 @@ "description": "Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.", "order": 2, "always_show": true + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 3, + "always_show": true } } }, diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_oss_spec.json b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_oss_spec.json index 1a884d8de813..7ffbbad5f718 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_oss_spec.json +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_oss_spec.json @@ -211,6 +211,15 @@ "description": "Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.", "order": 2, "always_show": true + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. 
If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 3, + "always_show": true } } }, diff --git a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceTest.java b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceTest.java index 5825ae0848df..b88b5baa6420 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test/java/io/airbyte/integrations/source/mysql/CdcMysqlSourceTest.java @@ -10,6 +10,7 @@ import static io.airbyte.integrations.source.mysql.MySqlSource.CDC_DEFAULT_CURSOR; import static io.airbyte.integrations.source.mysql.MySqlSource.CDC_LOG_FILE; import static io.airbyte.integrations.source.mysql.MySqlSource.CDC_LOG_POS; +import static io.airbyte.integrations.source.mysql.MySqlSpecConstants.FAIL_SYNC_OPTION; import static io.airbyte.integrations.source.mysql.cdc.MysqlCdcStateConstants.IS_COMPRESSED; import static io.airbyte.integrations.source.mysql.cdc.MysqlCdcStateConstants.MYSQL_CDC_OFFSET; import static io.airbyte.integrations.source.mysql.cdc.MysqlCdcStateConstants.MYSQL_DB_HISTORY; @@ -20,6 +21,7 @@ import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; @@ -33,6 +35,7 @@ import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.integrations.debezium.CdcSourceTest; import io.airbyte.cdk.integrations.debezium.internals.AirbyteSchemaHistoryStorage; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; @@ -277,6 +280,12 @@ protected void syncShouldHandlePurgedLogsGracefully() throws Exception { dataFromSecondBatch); assertEquals((recordsToCreate * 2) + recordsCreatedBeforeTestCount, recordsFromSecondBatch.size(), "Expected 46 records to be replicated in the second sync."); + + JsonNode failSyncConfig = testdb.testConfigBuilder() + .withCdcReplication(FAIL_SYNC_OPTION) + .with(SYNC_CHECKPOINT_RECORDS_PROPERTY, 1) + .build(); + assertThrows(ConfigErrorException.class, () -> source().read(failSyncConfig, getConfiguredCatalog(), state)); } /** diff --git a/airbyte-integrations/connectors/source-mysql/src/test/resources/expected_cloud_spec.json b/airbyte-integrations/connectors/source-mysql/src/test/resources/expected_cloud_spec.json index 52441e124b17..66f0b3bdf647 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/resources/expected_cloud_spec.json +++ b/airbyte-integrations/connectors/source-mysql/src/test/resources/expected_cloud_spec.json @@ -205,6 +205,15 @@ "description": "Enter the configured MySQL server timezone. 
This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.", "order": 2, "always_show": true + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 3, + "always_show": true } } }, diff --git a/airbyte-integrations/connectors/source-mysql/src/test/resources/expected_oss_spec.json b/airbyte-integrations/connectors/source-mysql/src/test/resources/expected_oss_spec.json index 841fa1f3bdba..78450b13aabd 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test/resources/expected_oss_spec.json +++ b/airbyte-integrations/connectors/source-mysql/src/test/resources/expected_oss_spec.json @@ -211,6 +211,15 @@ "description": "Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.", "order": 2, "always_show": true + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 3, + "always_show": true } } }, diff --git a/airbyte-integrations/connectors/source-mysql/src/testFixtures/java/io/airbyte/integrations/source/mysql/MySQLTestDatabase.java b/airbyte-integrations/connectors/source-mysql/src/testFixtures/java/io/airbyte/integrations/source/mysql/MySQLTestDatabase.java index fc031ed541c7..219d5e90f479 100644 --- a/airbyte-integrations/connectors/source-mysql/src/testFixtures/java/io/airbyte/integrations/source/mysql/MySQLTestDatabase.java +++ b/airbyte-integrations/connectors/source-mysql/src/testFixtures/java/io/airbyte/integrations/source/mysql/MySQLTestDatabase.java @@ -4,6 +4,9 @@ package io.airbyte.integrations.source.mysql; +import static io.airbyte.integrations.source.mysql.MySqlSpecConstants.INVALID_CDC_CURSOR_POSITION_PROPERTY; +import static io.airbyte.integrations.source.mysql.MySqlSpecConstants.RESYNC_DATA_OPTION; + import com.google.common.collect.ImmutableMap; import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.testutils.TestDatabase; @@ -128,12 +131,17 @@ public MySQLConfigBuilder withStandardReplication() { } public MySQLConfigBuilder withCdcReplication() { + return withCdcReplication(RESYNC_DATA_OPTION); + } + + public MySQLConfigBuilder withCdcReplication(String cdcCursorFailBehaviour) { return this .with("is_test", true) .with("replication_method", ImmutableMap.builder() .put("method", "CDC") .put("initial_waiting_seconds", 5) .put("server_time_zone", "America/Los_Angeles") + .put(INVALID_CDC_CURSOR_POSITION_PROPERTY, cdcCursorFailBehaviour) .build()); } diff --git 
a/airbyte-integrations/connectors/source-postgres/metadata.yaml b/airbyte-integrations/connectors/source-postgres/metadata.yaml index a7936f242b13..8f23c3a5a6be 100644 --- a/airbyte-integrations/connectors/source-postgres/metadata.yaml +++ b/airbyte-integrations/connectors/source-postgres/metadata.yaml @@ -9,7 +9,7 @@ data: connectorSubtype: database connectorType: source definitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 - dockerImageTag: 3.3.10 + dockerImageTag: 3.3.11 dockerRepository: airbyte/source-postgres documentationUrl: https://docs.airbyte.com/integrations/sources/postgres githubIssueLabel: source-postgres diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSpecConstants.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSpecConstants.java new file mode 100644 index 000000000000..a2e89818ee13 --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSpecConstants.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.postgres; + +// Constants defined in +// airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json. +public class PostgresSpecConstants { + + public static final String INVALID_CDC_CURSOR_POSITION_PROPERTY = "invalid_cdc_cursor_position_behavior"; + public static final String FAIL_SYNC_OPTION = "Fail sync"; + public static final String RESYNC_DATA_OPTION = "Re-sync data"; + +} diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java index 74c1c28f6b7c..45c20156aab2 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/cdc/PostgresCdcCtidInitializer.java @@ -6,6 +6,8 @@ import static io.airbyte.cdk.db.DbAnalyticsUtils.cdcCursorInvalidMessage; import static io.airbyte.integrations.source.postgres.PostgresQueryUtils.streamsUnderVacuum; +import static io.airbyte.integrations.source.postgres.PostgresSpecConstants.FAIL_SYNC_OPTION; +import static io.airbyte.integrations.source.postgres.PostgresSpecConstants.INVALID_CDC_CURSOR_POSITION_PROPERTY; import static io.airbyte.integrations.source.postgres.PostgresUtils.isDebugMode; import static io.airbyte.integrations.source.postgres.PostgresUtils.prettyPrintConfiguredAirbyteStreamList; @@ -112,6 +114,11 @@ public static List> cdcCtidIteratorsCombin if (!savedOffsetAfterReplicationSlotLSN) { AirbyteTraceMessageUtility.emitAnalyticsTrace(cdcCursorInvalidMessage()); + if (!sourceConfig.get("replication_method").has(INVALID_CDC_CURSOR_POSITION_PROPERTY) || sourceConfig.get("replication_method").get( + INVALID_CDC_CURSOR_POSITION_PROPERTY).asText().equals(FAIL_SYNC_OPTION)) { + throw new ConfigErrorException( + "Saved offset is before replication slot's confirmed lsn. Please reset the connection, and then increase WAL retention or reduce sync frequency to prevent this from happening in the future. 
See https://docs.airbyte.com/integrations/sources/postgres/postgres-troubleshooting#under-cdc-incremental-mode-there-are-still-full-refresh-syncs for more details."); + } LOGGER.warn("Saved offset is before Replication slot's confirmed_flush_lsn, Airbyte will trigger sync from scratch"); } else if (!isDebugMode(sourceConfig) && PostgresUtils.shouldFlushAfterSync(sourceConfig)) { // We do not want to acknowledge the WAL logs in debug mode. diff --git a/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json b/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json index 874b284be01d..e1cc6ff8b367 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json @@ -296,6 +296,14 @@ "description": "Specifies a query that the connector executes on the source database when the connector sends a heartbeat message. Please see the setup guide for how and when to configure this setting.", "default": "", "order": 8 + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 9 } } }, diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_cloud_deployment_spec.json b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_cloud_deployment_spec.json index 96fe095dbc9e..1243163310b0 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_cloud_deployment_spec.json +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_cloud_deployment_spec.json @@ -297,6 +297,14 @@ "description": "Specifies a query that the connector executes on the source database when the connector sends a heartbeat message. Please see the setup guide for how and when to configure this setting.", "default": "", "order": 8 + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. 
If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 9 } } }, diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json index febf87168ec9..040446878181 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json @@ -296,6 +296,14 @@ "description": "Specifies a query that the connector executes on the source database when the connector sends a heartbeat message. Please see the setup guide for how and when to configure this setting.", "default": "", "order": 8 + }, + "invalid_cdc_cursor_position_behavior": { + "type": "string", + "title": "Invalid CDC position behavior (Advanced)", + "description": "Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.", + "enum": ["Fail sync", "Re-sync data"], + "default": "Fail sync", + "order": 9 } } }, diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java index 551b81ac22f3..c51a2ad086d2 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java @@ -9,6 +9,8 @@ import static io.airbyte.cdk.integrations.debezium.internals.DebeziumEventConverter.CDC_DELETED_AT; import static io.airbyte.cdk.integrations.debezium.internals.DebeziumEventConverter.CDC_LSN; import static io.airbyte.cdk.integrations.debezium.internals.DebeziumEventConverter.CDC_UPDATED_AT; +import static io.airbyte.integrations.source.postgres.PostgresSpecConstants.FAIL_SYNC_OPTION; +import static io.airbyte.integrations.source.postgres.PostgresSpecConstants.RESYNC_DATA_OPTION; import static io.airbyte.integrations.source.postgres.ctid.CtidStateManager.STATE_TYPE_KEY; import static io.airbyte.integrations.source.postgres.ctid.InitialSyncCtidIteratorConstants.USE_TEST_CHUNK_SIZE; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -95,7 +97,7 @@ protected JsonNode config() { return testdb.testConfigBuilder() .withSchemas(modelsSchema(), modelsSchema() + "_random") .withoutSsl() - .withCdcReplication("After loading Data in the destination") + .withCdcReplication("After loading Data in the destination", RESYNC_DATA_OPTION) .with(SYNC_CHECKPOINT_RECORDS_PROPERTY, 1) .with("heartbeat_action_query", "") .build(); @@ -122,7 +124,7 @@ void testDebugMode() { final JsonNode invalidDebugConfig = testdb.testConfigBuilder() .withSchemas(modelsSchema(), modelsSchema() + "_random") .withoutSsl() - .withCdcReplication("While reading Data") + .withCdcReplication("While reading Data", RESYNC_DATA_OPTION) 
.with(SYNC_CHECKPOINT_RECORDS_PROPERTY, 1) .with("debug_mode", true) .build(); @@ -604,6 +606,47 @@ private void createAndPopulateTimestampTable() { } } + @Test + void testSyncShouldFailPurgedLogs() throws Exception { + final int recordsToCreate = 20; + + final JsonNode config = testdb.testConfigBuilder() + .withSchemas(modelsSchema(), modelsSchema() + "_random") + .withoutSsl() + .withCdcReplication("While reading Data", FAIL_SYNC_OPTION) + .with(SYNC_CHECKPOINT_RECORDS_PROPERTY, 1) + .build(); + final AutoCloseableIterator firstBatchIterator = source() + .read(config, getConfiguredCatalog(), null); + final List dataFromFirstBatch = AutoCloseableIterators + .toListAndClose(firstBatchIterator); + final List stateAfterFirstBatch = extractStateMessages(dataFromFirstBatch); + assertExpectedStateMessages(stateAfterFirstBatch); + // second batch of records again 20 being created + bulkInsertRecords(recordsToCreate); + + // Extract the last state message + final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1))); + final AutoCloseableIterator secondBatchIterator = source() + .read(config, getConfiguredCatalog(), state); + final List dataFromSecondBatch = AutoCloseableIterators + .toListAndClose(secondBatchIterator); + final List stateAfterSecondBatch = extractStateMessages(dataFromSecondBatch); + assertExpectedStateMessagesFromIncrementalSync(stateAfterSecondBatch); + + for (int recordsCreated = 0; recordsCreated < 1; recordsCreated++) { + final JsonNode record = + Jsons.jsonNode(ImmutableMap + .of(COL_ID, 400 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, + "H-" + recordsCreated)); + writeModelRecord(record); + } + + // Triggering sync with the first sync's state only which would mimic a scenario that the second + // sync failed on destination end, and we didn't save state + assertThrows(ConfigErrorException.class, () -> source().read(config, getConfiguredCatalog(), state)); + } + @Test protected void syncShouldHandlePurgedLogsGracefully() throws Exception { diff --git a/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java b/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java index a86dbd39c351..07146c33264e 100644 --- a/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java +++ b/airbyte-integrations/connectors/source-postgres/src/testFixtures/java/io/airbyte/integrations/source/postgres/PostgresTestDatabase.java @@ -4,6 +4,9 @@ package io.airbyte.integrations.source.postgres; +import static io.airbyte.integrations.source.postgres.PostgresSpecConstants.INVALID_CDC_CURSOR_POSITION_PROPERTY; +import static io.airbyte.integrations.source.postgres.PostgresSpecConstants.RESYNC_DATA_OPTION; + import com.google.common.collect.ImmutableMap; import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; @@ -174,10 +177,10 @@ public PostgresConfigBuilder withStandardReplication() { } public PostgresConfigBuilder withCdcReplication() { - return withCdcReplication("While reading Data"); + return withCdcReplication("While reading Data", RESYNC_DATA_OPTION); } - public PostgresConfigBuilder withCdcReplication(String LsnCommitBehaviour) { + public PostgresConfigBuilder withCdcReplication(String LsnCommitBehaviour, String cdcCursorFailBehaviour) { return this .with("is_test", 
true) .with("replication_method", Jsons.jsonNode(ImmutableMap.builder() @@ -186,6 +189,7 @@ public PostgresConfigBuilder withCdcReplication(String LsnCommitBehaviour) { .put("publication", testDatabase.getPublicationName()) .put("initial_waiting_seconds", DEFAULT_CDC_REPLICATION_INITIAL_WAIT.getSeconds()) .put("lsn_commit_behaviour", LsnCommitBehaviour) + .put(INVALID_CDC_CURSOR_POSITION_PROPERTY, cdcCursorFailBehaviour) .build())); } diff --git a/airbyte-integrations/connectors/source-s3/metadata.yaml b/airbyte-integrations/connectors/source-s3/metadata.yaml index f74b6ac72f2a..acbed125e654 100644 --- a/airbyte-integrations/connectors/source-s3/metadata.yaml +++ b/airbyte-integrations/connectors/source-s3/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: file connectorType: source definitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 - dockerImageTag: 4.5.4 + dockerImageTag: 4.5.5 dockerRepository: airbyte/source-s3 documentationUrl: https://docs.airbyte.com/integrations/sources/s3 githubIssueLabel: source-s3 diff --git a/airbyte-integrations/connectors/source-s3/pyproject.toml b/airbyte-integrations/connectors/source-s3/pyproject.toml index aaa22997c6f0..6f12b067f101 100644 --- a/airbyte-integrations/connectors/source-s3/pyproject.toml +++ b/airbyte-integrations/connectors/source-s3/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "4.5.4" +version = "4.5.5" name = "source-s3" description = "Source implementation for S3." authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py b/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py index d914690ee70c..411142ef71cc 100644 --- a/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py +++ b/airbyte-integrations/connectors/source-s3/source_s3/v4/stream_reader.py @@ -9,6 +9,7 @@ from typing import Iterable, List, Optional, Set import boto3.session +import pendulum import pytz import smart_open from airbyte_cdk.models import FailureType @@ -205,7 +206,11 @@ def _page( continue for remote_file in self._handle_file(file): - if self.file_matches_globs(remote_file, globs) and remote_file.uri not in seen: + if ( + self.file_matches_globs(remote_file, globs) + and self.is_modified_after_start_date(remote_file.last_modified) + and remote_file.uri not in seen + ): seen.add(remote_file.uri) yield remote_file else: @@ -217,6 +222,12 @@ def _page( logger.info(f"Finished listing objects from S3 for prefix={prefix}. 
Found {total_n_keys_for_prefix} objects.") break + def is_modified_after_start_date(self, last_modified_date: Optional[datetime]) -> bool: + """Returns True if given date higher or equal than start date or something is missing""" + if not (self.config.start_date and last_modified_date): + return True + return last_modified_date >= pendulum.parse(self.config.start_date).naive() + def _handle_file(self, file): if file["Key"].endswith(".zip"): yield from self._handle_zip_file(file) diff --git a/airbyte-integrations/connectors/source-s3/unit_tests/v4/test_stream_reader.py b/airbyte-integrations/connectors/source-s3/unit_tests/v4/test_stream_reader.py index b1bede862d22..01ad2d926380 100644 --- a/airbyte-integrations/connectors/source-s3/unit_tests/v4/test_stream_reader.py +++ b/airbyte-integrations/connectors/source-s3/unit_tests/v4/test_stream_reader.py @@ -5,7 +5,7 @@ import io import logging -from datetime import datetime +from datetime import datetime, timedelta from itertools import product from typing import Any, Dict, List, Optional, Set from unittest.mock import patch @@ -269,3 +269,38 @@ def test_get_iam_s3_client(boto3_client_mock): # Assertions to validate the s3 client assert s3_client is not None + +@pytest.mark.parametrize( + "start_date, last_modified_date, expected_result", + ( + # True when file is new or modified after given start_date + ( + datetime.now() - timedelta(days=180), + datetime.now(), + True + ), + ( + datetime.strptime("2024-01-01T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"), + datetime.strptime("2024-01-01T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"), + True + ), + # False when file is older than given start_date + ( + datetime.now(), + datetime.now() - timedelta(days=180), + False + ) + ) +) +def test_filter_file_by_start_date(start_date: datetime, last_modified_date: datetime, expected_result: bool) -> None: + reader = SourceS3StreamReader() + + reader.config = Config( + bucket="test", + aws_access_key_id="test", + aws_secret_access_key="test", + streams=[], + start_date=start_date.strftime("%Y-%m-%dT%H:%M:%SZ") + ) + + assert expected_result == reader.is_modified_after_start_date(last_modified_date) \ No newline at end of file diff --git a/build.gradle b/build.gradle index befdc9deaa13..3d13e5f7b2d5 100644 --- a/build.gradle +++ b/build.gradle @@ -104,6 +104,7 @@ allprojects { systemProperty 'junit.jupiter.execution.parallel.config.fixed.parallelism', 1 // Order test classes by annotation. systemProperty 'junit.jupiter.testclass.order.default', 'org.junit.jupiter.api.ClassOrderer$OrderAnnotation' + systemProperty 'junit.jupiter.extensions.autodetection.enabled', 'true' if (!project.hasProperty('testExecutionConcurrency')) { // By default, let gradle spawn as many independent workers as it wants. diff --git a/docs/contributing-to-airbyte/resources/qa-checks.md b/docs/contributing-to-airbyte/resources/qa-checks.md index 4647e6c59c78..640b3d458a1a 100644 --- a/docs/contributing-to-airbyte/resources/qa-checks.md +++ b/docs/contributing-to-airbyte/resources/qa-checks.md @@ -20,11 +20,6 @@ This document should contain a section for each breaking change, in order of the *Applies to the following connector languages: java, low-code, python* The user facing connector documentation should be stored under `./docs/integrations/s/.md`. 
-### Connectors documentation follows our guidelines -*Applies to the following connector types: source, destination* -*Applies to the following connector languages: java, low-code, python* - -The user facing connector documentation should follow the guidelines defined in the [documentation standards](https://hackmd.io/Bz75cgATSbm7DjrAqgl4rw). ### Connectors must have a changelog entry for each version *Applies to the following connector types: source, destination* *Applies to the following connector languages: java, low-code, python* diff --git a/docs/deploying-airbyte/on-kubernetes-via-helm.md b/docs/deploying-airbyte/on-kubernetes-via-helm.md index 02b1e7602174..d4f974eb8030 100644 --- a/docs/deploying-airbyte/on-kubernetes-via-helm.md +++ b/docs/deploying-airbyte/on-kubernetes-via-helm.md @@ -167,11 +167,10 @@ Before upgrading the chart update values.yaml as stated above and then run: - Perform upgrade of chart by running `helm upgrade %release_name% airbyte/airbyte --set auth.rootPassword=$ROOT_PASSWORD` - If you get an error about setting the auth.rootPassword, then you forgot to update the `values.yaml` file -### External Logs +### External Logs with S3 ::info -This was tested using [Airbyte Helm Chart Version 0.50.13](https://artifacthub.io/packages/helm/airbyte/airbyte/0.50.13) and S3 logs only. -Previous or newer version can change how to setup the external logs. +S3 logging was tested on [Airbyte Helm Chart Version 0.50.13](https://artifacthub.io/packages/helm/airbyte/airbyte/0.50.13) ::: Create a file called `airbyte-logs-secrets.yaml` to store the AWS Keys and other informations: @@ -219,7 +218,7 @@ global: tolerations: [] affinity: {} ``` -You can try to use `GCS` or `External Minio` but both weren't tested yet. Feel free to run tests and update the documentation. +GCS Logging information is below but you can try to use `External Minio` as well but it was not tested yet. Feel free to run tests and update the documentation. Add extra env variables to the following blocks: ```yaml @@ -297,14 +296,85 @@ server: Than run: `helm upgrade --install %RELEASE_NAME% airbyte/airbyte -n --values /path/to/values.yaml --version 0.50.13` +### External Logs with GCS + + +:::Info +GCS Logging is similar to the approach taken for S3 above, with a few small differences +GCS logging was tested on [Airbyte Helm Chart Version 0.53.178](https://artifacthub.io/packages/helm/airbyte/airbyte/0.53.178) +::: + +#### Create Google Cloud Storage Bucket + +1. **Access Google Cloud Console**: Go to the Google Cloud Console and select or create a project where you want to create the bucket. +2. **Open Cloud Storage**: Navigate to "Storage" > "Browser" in the left-side menu. +3. **Create Bucket**: Click on "Create bucket". Give your bucket a unique name, select a region for the bucket, and configure other settings such as storage class and access control according to your requirements. Finally, click "Create". + +#### Create Google Cloud Service Account + +1. **Open IAM & Admin**: In the Cloud Console, navigate to "IAM & Admin" > "Service Accounts". +2. **Create Service Account**: Click "Create Service Account", enter a name, description, and then click "Create". +3. **Grant Permissions**: Assign the role of "Storage Object Admin" to the service account by selecting it from the role list. +4. **Create Key**: After creating the service account, click on it, go to the "Keys" tab, and then click "Add Key" > "Create new key". Choose JSON as the key type and click "Create". 
The key file will be downloaded automatically to your computer. + +#### Create a Kubernetes Secret + +- Use the **`kubectl create secret`** command to create a Kubernetes secret from the JSON key file. Replace **``** with the desired name for your secret, **``** with the path to the JSON key file you downloaded, and **``** with the namespace where your deployment will be running. + +```kubectl create secret generic --from-file=gcp.json= --namespace=``` + +#### Create an extra Volume where the GCSFS secret will be added in the values.yaml inside of the worker section +``` +worker: + extraVolumes: + - name: gcsfs-creds + secret: + secretName: + extraVolumeMounts: + - name: gcsfs-creds + mountPath: "/etc/secrets" + readOnly: true +``` + +#### Update the values.yaml with the GCS Logging Information below +Update the following Environment Variables in the global section: +``` +global: + state: + storage: + type: "GCS" + + logs: + storage: + type: "GCS" + gcs: + bucket: "" + credentials: "/etc/secrets/gcp.json" + + extraEnv: + - name: STATE_STORAGE_GCS_BUCKET_NAME + value: + - name: STATE_STORAGE_GCS_APPLICATION_CREDENTIALS + value: /etc/secrets/gcp.json + - name: CONTAINER_ORCHESTRATOR_SECRET_NAME + value: + - name: CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH + value: /etc/secrets/ +``` + +Than run: +`helm upgrade --install %RELEASE_NAME% airbyte/airbyte -n --values /path/to/values.yaml --version 0.53.178` + ### External Airbyte Database -::info + +:::info This was tested using [Airbyte Helm Chart Version 0.50.13](https://artifacthub.io/packages/helm/airbyte/airbyte/0.50.13). Previous or newer version can change how the external database can be configured. ::: + The Airbyte Database only works with Postgres 13. Make sure the database is accessible inside the cluster using `busy-box` service using `telnet` or `ping` command. diff --git a/docs/integrations/connector-support-levels.md b/docs/integrations/connector-support-levels.md index 9baeebc9785c..f32a1619f663 100644 --- a/docs/integrations/connector-support-levels.md +++ b/docs/integrations/connector-support-levels.md @@ -12,32 +12,57 @@ The following table describes the support levels of Airbyte connectors. | **Who builds them?** | Either the community or the Airbyte team. | Typically they are built by the community. The Airbyte team may upgrade them to Certified at any time. | Anyone can build custom connectors. We recommend using our [Connector Builder](https://docs.airbyte.com/connector-development/connector-builder-ui/overview) or [Low-code CDK](https://docs.airbyte.com/connector-development/config-based/low-code-cdk-overview). | | **Who maintains them?** | The Airbyte team | Users | Users | | **Production Readiness** | Guaranteed by Airbyte | Not guaranteed | Not guaranteed | -| **Support: Cloud** | Supported* | No Support | Supported** | -| **Support: Powered by Airbyte** | Supported* | No Support | Supported** | -| **Support: Self-Managed Enterprise** | Supported* | No Support | Supported** | +| **Support: Cloud** | Supported\* | No Support | Supported\*\* | +| **Support: Powered by Airbyte** | Supported\* | No Support | Supported\*\* | +| **Support: Self-Managed Enterprise** | Supported\* | No Support | Supported\*\* | | **Support: Community (OSS)** | Slack Support only | No Support | Slack Support only | -\*For Certified connectors, Official Support SLAs are only available to customers with Premium Support included in their contract. Otherwise, please use our support portal and we will address your issues as soon as possible. 
+\*For Certified connectors, Official Support SLAs are only available to customers with Premium +Support included in their contract. Otherwise, please use our support portal and we will address +your issues as soon as possible. -\*\*For Custom connectors, Official Support SLAs are only available to customers with Premium Support included in their contract. This support is provided with best efforts, and maintenance/upgrades are owned by the customer. +\*\*For Custom connectors, Official Support SLAs are only available to customers with Premium +Support included in their contract. This support is provided with best efforts, and +maintenance/upgrades are owned by the customer. ## Certified -A **Certified** connector is actively maintained and supported by the Airbyte team and maintains a high quality bar. It is production ready. +A **Certified** connector is actively maintained and supported by the Airbyte team and maintains a +high quality bar. It is production ready. ### What you should know about Certified connectors: - Certified connectors are available to all users. - These connectors have been tested and vetted in order to be certified and are production ready. -- Certified connectors should go through minimal breaking change but in the event an upgrade is needed users will be given an adequate upgrade window. +- Certified connectors should go through minimal breaking change but in the event an upgrade is + needed users will be given an adequate upgrade window. ## Community -A **Community** connector is maintained by the Airbyte community until it becomes Certified. Airbyte has over 800 code contributors and 15,000 people in the Slack community to help. The Airbyte team is continually certifying Community connectors as usage grows. As these connectors are not maintained by Airbyte, we do not offer support SLAs around them, and we encourage caution when using them in production. +A **Community** connector is maintained by the Airbyte community until it becomes Certified. Airbyte +has over 800 code contributors and 15,000 people in the Slack community to help. The Airbyte team is +continually certifying Community connectors as usage grows. As these connectors are not maintained +by Airbyte, we do not offer support SLAs around them, and we encourage caution when using them in +production. ### What you should know about Community connectors: - Community connectors are available to all users. -- Community connectors may be upgraded to Certified at any time, and we will notify users of these upgrades via our Slack Community and in our Connector Catalog. -- Community connectors might not be feature-complete (features planned for release are under development or not prioritized) and may include backward-incompatible/breaking API changes with no or short notice. +- Community connectors may be upgraded to Certified at any time, and we will notify users of these + upgrades via our Slack Community and in our Connector Catalog. +- Community connectors might not be feature-complete (features planned for release are under + development or not prioritized) and may include backward-incompatible/breaking API changes with no + or short notice. - Community connectors have no Support SLAs. + +## Archived + +From time to time, Airbyte will remove a connector from the Connector Catalog. This is typically due +extremely low usage and/or if the connector is no longer maintained by the community. This is +necessary to ensure that the Connector Catalog maintains a minimum level of quality. 
+ +Archived connectors will not receive any further updates or support from the Airbyte team. Archived +connectors remain source-available in the +[`airbytehq/connector-archive`](https://github.com/airbytehq/connector-archive) repository on +GitHub. If you wish to take over the maintenance of an archived connector, please open a Github +Discussion. diff --git a/docs/integrations/destinations/kvdb.md b/docs/integrations/destinations/kvdb.md index 2574237a92e0..cb8ba2c53cb1 100644 --- a/docs/integrations/destinations/kvdb.md +++ b/docs/integrations/destinations/kvdb.md @@ -2,10 +2,27 @@ The KVDB destination for Airbyte +## Prerequisites + +None. + +## Setup guide + +TODO + +## Supported sync modes + +TODO + +## Supported streams + +TODO + ## Changelog -| Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :-------------------------------- | -| 0.1.2 | 2024-02-19 | [35422](https://github.com/airbytehq/airbyte/pull/35422) | bump connector version to publish | -| 0.1.1 | 2024-02-16 | [35370](https://github.com/airbytehq/airbyte/pull/35370) | bump connector version to publish | -| 0.1.0 | 2021-07-19 | [4786](https://github.com/airbytehq/airbyte/pull/4786) | Python Demo Destination: KVDB | +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------- | +| 0.1.3 | 2024-02-19 | [xxx](https://github.com/airbytehq/airbyte/pull/xxx) | bump connector version to publish, convert to base docker image and poetry | +| 0.1.2 | 2024-02-19 | [35422](https://github.com/airbytehq/airbyte/pull/35422) | bump connector version to publish | +| 0.1.1 | 2024-02-16 | [35370](https://github.com/airbytehq/airbyte/pull/35370) | bump connector version to publish | +| 0.1.0 | 2021-07-19 | [4786](https://github.com/airbytehq/airbyte/pull/4786) | Python Demo Destination: KVDB | diff --git a/docs/integrations/sources/facebook-marketing.md b/docs/integrations/sources/facebook-marketing.md index 5fdc14ac43c4..694fe73553e1 100644 --- a/docs/integrations/sources/facebook-marketing.md +++ b/docs/integrations/sources/facebook-marketing.md @@ -78,14 +78,11 @@ You can use the [Access Token Tool](https://developers.facebook.com/tools/access ::: 3. (Optional) For **End Date**, use the provided datepicker, or enter the date programmatically in the `YYYY-MM-DDTHH:mm:ssZ` format. This is the date until which you'd like to replicate data for all Incremental streams. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data. -4. (Optional) Toggle the **Include Deleted Campaigns, Ads, and AdSets** button to include data from deleted Campaigns, Ads, and AdSets. - - :::info - The Facebook Marketing API does not have a concept of deleting records in the same way that a database does. While you can archive or delete an ad campaign, the API maintains a record of the campaign. Toggling the **Include Deleted** button lets you replicate records for campaigns or ads even if they were archived or deleted from the Facebook platform. - ::: - -5. (Optional) Toggle the **Fetch Thumbnail Images** button to fetch the `thumbnail_url` and store the result in `thumbnail_data_url` for each [Ad Creative](https://developers.facebook.com/docs/marketing-api/creative/). -6. 
(Optional) In the **Custom Insights** section, you may provide a list of ad statistics entries. Each entry should have a unique name and can contain fields, breakdowns or action_breakdowns. Fields refer to the different data points you can collect from an ad, while breakdowns and action_breakdowns let you segment this data for more detailed insights. Click on **Add** to create a new entry in this list. +4. (Optional) Multiselect the **Campaign Statuses** to include data from Campaigns for particular statuses. +5. (Optional) Multiselect the **AdSet Statuses** to include data from AdSets for particular statuses. +6. (Optional) Multiselect the **Ad Statuses** to include data from Ads for particular statuses. +7. (Optional) Toggle the **Fetch Thumbnail Images** button to fetch the `thumbnail_url` and store the result in `thumbnail_data_url` for each [Ad Creative](https://developers.facebook.com/docs/marketing-api/creative/). +8. (Optional) In the **Custom Insights** section, you may provide a list of ad statistics entries. Each entry should have a unique name and can contain fields, breakdowns or action_breakdowns. Fields refer to the different data points you can collect from an ad, while breakdowns and action_breakdowns let you segment this data for more detailed insights. Click on **Add** to create a new entry in this list. :::note To retrieve specific fields from Facebook Ads Insights combined with other breakdowns, you can choose which fields and breakdowns to sync. However, please note that not all fields can be requested, and many are only functional when combined with specific other fields. For example, the breakdown `app_id` is only supported with the `total_postbacks` field. For more information on the breakdown limitations, refer to the [Facebook documentation](https://developers.facebook.com/docs/marketing-api/insights/breakdowns). @@ -113,10 +110,10 @@ You can use the [Access Token Tool](https://developers.facebook.com/tools/access Additional data streams for your Facebook Marketing connector are dynamically generated according to the Custom Insights you specify. If you have an existing Facebook Marketing source and you decide to update or remove some of your Custom Insights, you must also adjust the connections that sync to these streams. Specifically, you should either disable these connections or refresh the source schema associated with them to reflect the changes. ::: -7. (Optional) For **Page Size of Requests**, you can specify the number of records per page for paginated responses. Most users do not need to set this field unless specific issues arise or there are unique use cases that require tuning the connector's settings. The default value is set to retrieve 100 records per page. -8. (Optional) For **Insights Window Lookback**, you may set a window in days to revisit data during syncing to capture updated conversion data from the API. Facebook allows for attribution windows of up to 28 days, during which time a conversion can be attributed to an ad. If you have set a custom attribution window in your Facebook account, please set the same value here. Otherwise, you may leave it at the default value of 28. For more information on action attributions, please refer to [the Meta Help Center](https://www.facebook.com/business/help/458681590974355?id=768381033531365). -8. (Optional) For **Insights Job Timeout**, you may set a custom value in range from 10 to 60. It establishes the maximum amount of time (in minutes) of waiting for the report job to complete. -9. 
Click **Set up source** and wait for the tests to complete. +9. (Optional) For **Page Size of Requests**, you can specify the number of records per page for paginated responses. Most users do not need to set this field unless specific issues arise or there are unique use cases that require tuning the connector's settings. The default value is set to retrieve 100 records per page. +10. (Optional) For **Insights Window Lookback**, you may set a window in days to revisit data during syncing to capture updated conversion data from the API. Facebook allows for attribution windows of up to 28 days, during which time a conversion can be attributed to an ad. If you have set a custom attribution window in your Facebook account, please set the same value here. Otherwise, you may leave it at the default value of 28. For more information on action attributions, please refer to [the Meta Help Center](https://www.facebook.com/business/help/458681590974355?id=768381033531365). +11. (Optional) For **Insights Job Timeout**, you may set a custom value in range from 10 to 60. It establishes the maximum amount of time (in minutes) of waiting for the report job to complete. +12. Click **Set up source** and wait for the tests to complete. @@ -203,7 +200,9 @@ The Facebook Marketing connector uses the `lookback_window` parameter to repeate | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 1.3.3 | 2024-02-15 | [35061](https://github.com/airbytehq/airbyte/pull/35061) | Add integration tests | +| 1.4.1 | 2024-02-21 | [35467](https://github.com/airbytehq/airbyte/pull/35467) | Fix error with incorrect state transforming in the 1.4.0 version | +| 1.4.0 | 2024-02-20 | [32449](https://github.com/airbytehq/airbyte/pull/32449) | Replace "Include Deleted Campaigns, Ads, and AdSets" option in configuration with specific statuses selection per stream | +| 1.3.3 | 2024-02-15 | [35061](https://github.com/airbytehq/airbyte/pull/35061) | Add integration tests | | | 1.3.2 | 2024-02-12 | [35178](https://github.com/airbytehq/airbyte/pull/35178) | Manage dependencies with Poetry | | 1.3.1 | 2024-02-05 | [34845](https://github.com/airbytehq/airbyte/pull/34845) | Add missing fields to schemas | | 1.3.0 | 2024-01-09 | [33538](https://github.com/airbytehq/airbyte/pull/33538) | Updated the `Ad Account ID(s)` property to support multiple IDs | diff --git a/docs/integrations/sources/mongodb-v2.md b/docs/integrations/sources/mongodb-v2.md index 4515d39997fb..0df7d0f12a97 100644 --- a/docs/integrations/sources/mongodb-v2.md +++ b/docs/integrations/sources/mongodb-v2.md @@ -214,7 +214,8 @@ For more information regarding configuration parameters, please see [MongoDb Doc | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------------| -| 1.2.10 | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor. 
| +| 1.2.11 | 2024-02-20 | [35375](https://github.com/airbytehq/airbyte/pull/35375) | Add config to throw an error on invalid CDC position and enable it by default. | +| 1.2.10 | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor. | | 1.2.9 | 2024-02-13 | [35114](https://github.com/airbytehq/airbyte/pull/35114) | Extend subsequent cdc record wait time to the duration of initial. Bug Fixes | | 1.2.8 | 2024-02-08 | [34748](https://github.com/airbytehq/airbyte/pull/34748) | Adopt CDK 0.19.0 | | 1.2.7 | 2024-02-01 | [34759](https://github.com/airbytehq/airbyte/pull/34759) | Fail sync if initial snapshot for any stream fails. | diff --git a/docs/integrations/sources/mysql.md b/docs/integrations/sources/mysql.md index 09117fd7e280..18e59658342d 100644 --- a/docs/integrations/sources/mysql.md +++ b/docs/integrations/sources/mysql.md @@ -223,6 +223,7 @@ Any database or table encoding combination of charset and collation is supported | Version | Date | Pull Request | Subject | |:--------|:-----------|:-----------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.8 | 2024-02-20 | [35338](https://github.com/airbytehq/airbyte/pull/35338) | Add config to throw an error on invalid CDC position. | | 3.3.7 | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor. | | 3.3.6 | 2024-02-13 | [34869](https://github.com/airbytehq/airbyte/pull/34573) | Don't emit state in SourceStateIterator when there is an underlying stream failure. | | 3.3.5 | 2024-02-12 | [34580](https://github.com/airbytehq/airbyte/pull/34580) | Support special chars in db name | diff --git a/docs/integrations/sources/mysql/mysql-troubleshooting.md b/docs/integrations/sources/mysql/mysql-troubleshooting.md index aee512157839..7e6265d0b867 100644 --- a/docs/integrations/sources/mysql/mysql-troubleshooting.md +++ b/docs/integrations/sources/mysql/mysql-troubleshooting.md @@ -18,6 +18,18 @@ * Amazon RDS MySQL or MariaDB connection issues: If you see the following `Cannot create a PoolableConnectionFactory` error, please add `enabledTLSProtocols=TLSv1.2` in the JDBC parameters. * Amazon RDS MySQL connection issues: If you see `Error: HikariPool-1 - Connection is not available, request timed out after 30001ms.`, many times this due to your VPC not allowing public traffic. We recommend going through [this AWS troubleshooting checklist](https://aws.amazon.com/premiumsupport/knowledge-center/rds-cannot-connect/) to ensure the correct permissions/settings have been granted to allow Airbyte to connect to your database. +### Under CDC incremental mode, there are still full refresh syncs + +Normally under the CDC mode, the MySQL source will first run a full refresh sync to read the snapshot of all the existing data, and all subsequent runs will only be incremental syncs reading from the binlogs. However, occasionally, you may see full refresh syncs after the initial run. When this happens, you will see the following log: + +> Saved offset no longer present on the server, Airbyte is going to trigger a sync from scratch + +The root causes is that the binglogs needed for the incremental sync have been removed by MySQL. 
This can occur under the following scenarios: + +- When there are lots of database updates resulting in more WAL files than allowed in the `pg_wal` directory, Postgres will purge or archive the WAL files. This scenario is preventable. Possible solutions include: + - Sync the data source more frequently. + - Set a higher `binlog_expire_logs_seconds`. It's recommended to set this value to a time period of 7 days. See detailed documentation [here](https://dev.mysql.com/doc/refman/8.0/en/replication-options-binary-log.html#sysvar_binlog_expire_logs_seconds). The downside of this approach is that more disk space will be needed. + ### EventDataDeserializationException errors during initial snapshot When a sync runs for the first time using CDC, Airbyte performs an initial consistent snapshot of your database. Airbyte doesn't acquire any table locks \(for tables defined with MyISAM engine, the tables would still be locked\) while creating the snapshot to allow writes by other database clients. But in order for the sync to work without any error/unexpected behaviour, it is assumed that no schema changes are happening while the snapshot is running. diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md index 4c8649e7c5bf..d9cdddfc9f7e 100644 --- a/docs/integrations/sources/postgres.md +++ b/docs/integrations/sources/postgres.md @@ -292,9 +292,10 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp | Version | Date | Pull Request | Subject | |---------|------------|----------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.3.11 | 2024-02-20 | [35304](https://github.com/airbytehq/airbyte/pull/35304) | Add config to throw an error on invalid CDC position and enable it by default. | | 3.3.10 | 2024-02-13 | [35036](https://github.com/airbytehq/airbyte/pull/34751) | Emit analytics message for invalid CDC cursor. | -| 3.3.9 | 2024-02-13 | [35224](https://github.com/airbytehq/airbyte/pull/35224) | Adopt CDK 0.20.4 | -| 3.3.8 | 2024-02-08 | [34751](https://github.com/airbytehq/airbyte/pull/34751) | Adopt CDK 0.19.0 | +| 3.3.9 | 2024-02-13 | [35224](https://github.com/airbytehq/airbyte/pull/35224) | Adopt CDK 0.20.4 | +| 3.3.8 | 2024-02-08 | [34751](https://github.com/airbytehq/airbyte/pull/34751) | Adopt CDK 0.19.0 | | 3.3.7 | 2024-02-08 | [34781](https://github.com/airbytehq/airbyte/pull/34781) | Add a setting in the setup page to advance the LSN. | | 3.3.6 | 2024-02-07 | [34892](https://github.com/airbytehq/airbyte/pull/34892) | Adopt CDK v0.16.6 | | 3.3.5 | 2024-02-07 | [34948](https://github.com/airbytehq/airbyte/pull/34948) | Adopt CDK v0.16.5 | diff --git a/docs/integrations/sources/postgres/postgres-troubleshooting.md b/docs/integrations/sources/postgres/postgres-troubleshooting.md index 721c4b694269..329cc2af7274 100644 --- a/docs/integrations/sources/postgres/postgres-troubleshooting.md +++ b/docs/integrations/sources/postgres/postgres-troubleshooting.md @@ -78,7 +78,7 @@ Normally under the CDC mode, the Postgres source will first run a full refresh s The root causes is that the WALs needed for the incremental sync has been removed by Postgres. This can occur under the following scenarios: - When there are lots of database updates resulting in more WAL files than allowed in the `pg_wal` directory, Postgres will purge or archive the WAL files. 
This scenario is preventable. Possible solutions include: - - Sync the data source more frequently. The downside is that more computation resources will be consumed, leading to a higher Airbyte bill. + - Sync the data source more frequently. - Set a higher `wal_keep_size`. If no unit is provided, it is in megabytes, and the default is `0`. See detailed documentation [here](https://www.postgresql.org/docs/current/runtime-config-replication.html#GUC-WAL-KEEP-SIZE). The downside of this approach is that more disk space will be needed. - When the Postgres connector successfully reads the WAL and acknowledges it to Postgres, but the destination connector fails to consume the data, the Postgres connector will try to read the same WAL again, which may have been removed by Postgres, since the WAL record is already acknowledged. This scenario is rare, because it can happen, and currently there is no way to prevent it. The correct behavior is to perform a full refresh. diff --git a/docs/integrations/sources/s3.md b/docs/integrations/sources/s3.md index d225bba5909a..ff8263e911c5 100644 --- a/docs/integrations/sources/s3.md +++ b/docs/integrations/sources/s3.md @@ -88,6 +88,10 @@ The Amazon S3 source connector supports the following [sync modes](https://docs. | Replicate Multiple Streams \(distinct tables\) | Yes | | Namespaces | No | +## Supported streams + +There is no predefined streams. The streams are based on content of your bucket. + ## File Compressions | Compression | Supported? | @@ -260,8 +264,9 @@ To perform the text extraction from PDF and Docx files, the connector uses the [ | Version | Date | Pull Request | Subject | |:--------|:-----------|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------| -| 4.5.4 | 2024-02-15 | [35055](https://github.com/airbytehq/airbyte/pull/35055) | Temporarily revert concurrency | -| 4.5.3 | 2024-02-12 | [35164](https://github.com/airbytehq/airbyte/pull/35164) | Manage dependencies with Poetry. | +| 4.5.5 | 2024-02-18 | [35392](https://github.com/airbytehq/airbyte/pull/35392) | Add support filtering by start date | +| 4.5.4 | 2024-02-15 | [35055](https://github.com/airbytehq/airbyte/pull/35055) | Temporarily revert concurrency | +| 4.5.3 | 2024-02-12 | [35164](https://github.com/airbytehq/airbyte/pull/35164) | Manage dependencies with Poetry. 
| | 4.5.2 | 2024-02-06 | [34930](https://github.com/airbytehq/airbyte/pull/34930) | Bump CDK version to fix issue when SyncMode is missing from catalog | | 4.5.1 | 2024-02-02 | [31701](https://github.com/airbytehq/airbyte/pull/31701) | Add `region` support | | 4.5.0 | 2024-02-01 | [34591](https://github.com/airbytehq/airbyte/pull/34591) | Run full refresh syncs concurrently | diff --git a/docusaurus/src/components/ConnectorRegistry.jsx b/docusaurus/src/components/ConnectorRegistry.jsx index d3548c350d34..1766c204eddf 100644 --- a/docusaurus/src/components/ConnectorRegistry.jsx +++ b/docusaurus/src/components/ConnectorRegistry.jsx @@ -19,10 +19,12 @@ Sorts connectors by release stage and then name */ function connectorSort(a, b) { if (a.supportLevel_oss !== b.supportLevel_oss) { - if (a.supportLevel_oss === "certified") return -2; - if (b.supportLevel_oss === "certified") return 2; - if (a.supportLevel_oss === "community") return -1; - if (b.supportLevel_oss === "community") return 1; + if (a.supportLevel_oss === "certified") return -3; + if (b.supportLevel_oss === "certified") return 3; + if (a.supportLevel_oss === "community") return -2; + if (b.supportLevel_oss === "community") return 2; + if (a.supportLevel_oss === "archived") return -1; + if (b.supportLevel_oss === "archived") return 1; } if (a.name_oss < b.name_oss) return -1; diff --git a/docusaurus/src/components/HeaderDecoration.jsx b/docusaurus/src/components/HeaderDecoration.jsx index e4ebdc918f52..9d23dcf77a86 100644 --- a/docusaurus/src/components/HeaderDecoration.jsx +++ b/docusaurus/src/components/HeaderDecoration.jsx @@ -38,6 +38,7 @@ export const HeaderDecoration = ({ const isOss = isOssString.toUpperCase() === "TRUE"; const isCloud = isCloudString.toUpperCase() === "TRUE"; const isPypiPublished = isPypiPublishedString.toUpperCase() === "TRUE"; + const isArchived = supportLevel.toUpperCase() === "ARCHIVED"; return ( <> @@ -51,13 +52,17 @@ export const HeaderDecoration = ({ {isOss ? CHECK_ICON : CROSS_ICON} Airbyte OSS - {isPypiPublished && {CHECK_ICON} airbyte_lib} + {isPypiPublished && ( + + {CHECK_ICON} airbyte_lib + + )}
Support Level
- + {capitalizeFirstLetter(supportLevel)}
@@ -74,9 +79,16 @@ export const HeaderDecoration = ({
-

{originalTitle}

+

+ {isArchived ? ( + + {originalTitle} [ARCHIVED] + + ) : ( + originalTitle + )} +

); }; - diff --git a/docusaurus/src/remark/docsHeaderDecoration.js b/docusaurus/src/remark/docsHeaderDecoration.js index 8f8fcb96c8cd..26109925264a 100644 --- a/docusaurus/src/remark/docsHeaderDecoration.js +++ b/docusaurus/src/remark/docsHeaderDecoration.js @@ -1,5 +1,4 @@ const visit = require("unist-util-visit").visit; -const { isPypiConnector } = require("../connector_registry"); const { isDocsPage, getRegistryEntry } = require("./utils"); const toAttributes = (props) =>