This repository has been archived by the owner on Jan 2, 2025. It is now read-only.

Merge pull request #20 from google/ads_api
Update from AdWords API to Ads API
antoniolmm authored May 25, 2021
2 parents 47cc4e9 + 723fcdd commit c47798b
Showing 16 changed files with 347 additions and 257 deletions.
18 changes: 9 additions & 9 deletions megalist_dataflow/mappers/ads_user_list_pii_hashing_mapper.py
@@ -42,18 +42,18 @@ def _hash_user(self, user, hasher):

         try:
             if 'email' in user:
-                hashed['hashedEmail'] = hasher.hash_field(user['email'])
+                hashed['hashed_email'] = hasher.hash_field(user['email'])
                 del hashed['email']
         except:
             self.logger.error("Error hashing email for user: %s" % user)

         try:
             if 'mailing_address_first_name' in user and 'mailing_address_last_name' in user:
-                hashed['addressInfo'] = {
-                    'hashedFirstName': hasher.hash_field(user['mailing_address_first_name']),
-                    'hashedLastName': hasher.hash_field(user['mailing_address_last_name']),
-                    'countryCode': user['mailing_address_country'],
-                    'zipCode': user['mailing_address_zip']
+                hashed['address_info'] = {
+                    'hashed_first_name': hasher.hash_field(user['mailing_address_first_name']),
+                    'hashed_last_name': hasher.hash_field(user['mailing_address_last_name']),
+                    'country_code': user['mailing_address_country'],
+                    'postal_code': user['mailing_address_zip']
                 }
                 del hashed['mailing_address_first_name']
                 del hashed['mailing_address_last_name']
@@ -64,22 +64,22 @@ def _hash_user(self, user, hasher):

         try:
             if 'phone' in user:
-                hashed['hashedPhoneNumber'] = hasher.hash_field(user['phone'])
+                hashed['hashed_phone_number'] = hasher.hash_field(user['phone'])
                 del hashed['phone']
         except:
             self.logger.error("Error hashing phone for user: %s" % user)

         try:
             if 'mobile_device_id' in user:
-                hashed['mobileId'] = user['mobile_device_id']
+                hashed['mobile_id'] = user['mobile_device_id']
                 del hashed['mobile_device_id']
         except:
             self.logger.error(
                 "Error hashing mobile_device_id for user: %s" % user)

         try:
             if 'user_id' in user:
-                hashed['userId'] = hasher.hash_field(user['user_id'])
+                hashed['third_party_user_id'] = hasher.hash_field(user['user_id'])
                 del hashed['user_id']
         except:
             self.logger.error("Error hashing user_id for user: %s" % user)
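
For reference, the renamed keys match the snake_case field names the Google Ads API uses for Customer Match user identifiers (hashed_email, hashed_phone_number, mobile_id, third_party_user_id, and address_info with hashed_first_name, hashed_last_name, country_code, postal_code); the old camelCase keys followed the AdWords API. A minimal sketch of the payload shape the mapper now emits, assuming hash_field trims, lower-cases and SHA-256-hashes each value (the 64-character hex digests in the test fixtures below point that way); the sample user and helper function are illustrative, not part of this commit:

import hashlib

def sha256_normalized(value: str) -> str:
    # Assumed normalization before hashing: trim and lower-case, as Customer Match expects.
    return hashlib.sha256(value.strip().lower().encode('utf-8')).hexdigest()

user = {
    'email': '[email protected]',
    'phone': '+15551234567',
    'mailing_address_first_name': 'John',
    'mailing_address_last_name': 'Doe',
    'mailing_address_country': 'US',
    'mailing_address_zip': '12345',
}

# Shape produced by _hash_user after this change (snake_case, Google Ads API style).
hashed_user = {
    'hashed_email': sha256_normalized(user['email']),
    'hashed_phone_number': sha256_normalized(user['phone']),
    'address_info': {
        'hashed_first_name': sha256_normalized(user['mailing_address_first_name']),
        'hashed_last_name': sha256_normalized(user['mailing_address_last_name']),
        'country_code': user['mailing_address_country'],
        'postal_code': user['mailing_address_zip'],
    },
}
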
48 changes: 24 additions & 24 deletions megalist_dataflow/mappers/ads_user_list_pii_hashing_mapper_test.py
@@ -64,21 +64,21 @@ def test_pii_hashing(mocker):
     assert len(hashed) == 2

     assert hashed[0] == {
-        'hashedEmail': 'd709f370e52b57b4eb75f04e2b3422c4d41a05148cad8f81776d94a048fb70af',
-        'addressInfo': {
-            'countryCode': 'US',
-            'hashedFirstName': '96d9632f363564cc3032521409cf22a852f2032eec099ed5967c0d000cec607a',
-            'hashedLastName': '799ef92a11af918e3fb741df42934f3b568ed2d93ac1df74f1b8d41a27932a6f',
-            'zipCode': '12345'
+        'hashed_email': 'd709f370e52b57b4eb75f04e2b3422c4d41a05148cad8f81776d94a048fb70af',
+        'address_info': {
+            'country_code': 'US',
+            'hashed_first_name': '96d9632f363564cc3032521409cf22a852f2032eec099ed5967c0d000cec607a',
+            'hashed_last_name': '799ef92a11af918e3fb741df42934f3b568ed2d93ac1df74f1b8d41a27932a6f',
+            'postal_code': '12345'
         }}

     assert hashed[1] == {
-        'hashedEmail': '7c815580ad3844bcb627c74d24eaf700e1a711d9c23e9beb62ab8d28e8cb7954',
-        'addressInfo': {
-            'countryCode': 'US',
-            'hashedFirstName': '81f8f6dde88365f3928796ec7aa53f72820b06db8664f5fe76a7eb13e24546a2',
-            'hashedLastName': '799ef92a11af918e3fb741df42934f3b568ed2d93ac1df74f1b8d41a27932a6f',
-            'zipCode': '12345'
+        'hashed_email': '7c815580ad3844bcb627c74d24eaf700e1a711d9c23e9beb62ab8d28e8cb7954',
+        'address_info': {
+            'country_code': 'US',
+            'hashed_first_name': '81f8f6dde88365f3928796ec7aa53f72820b06db8664f5fe76a7eb13e24546a2',
+            'hashed_last_name': '799ef92a11af918e3fb741df42934f3b568ed2d93ac1df74f1b8d41a27932a6f',
+            'postal_code': '12345'
         }}


@@ -111,19 +111,19 @@ def test_avoid_pii_hashing(mocker):
     assert len(hashed) == 2

     assert hashed[0] == {
-        'hashedEmail': '[email protected]',
-        'addressInfo': {
-            'countryCode': 'US',
-            'hashedFirstName': 'John',
-            'hashedLastName': 'Doe',
-            'zipCode': '12345'
+        'hashed_email': '[email protected]',
+        'address_info': {
+            'country_code': 'US',
+            'hashed_first_name': 'John',
+            'hashed_last_name': 'Doe',
+            'postal_code': '12345'
         }}

     assert hashed[1] == {
-        'hashedEmail': '[email protected]',
-        'addressInfo': {
-            'countryCode': 'US',
-            'hashedFirstName': 'Jane',
-            'hashedLastName': 'Doe',
-            'zipCode': '12345'
+        'hashed_email': '[email protected]',
+        'address_info': {
+            'country_code': 'US',
+            'hashed_first_name': 'Jane',
+            'hashed_last_name': 'Doe',
+            'postal_code': '12345'
         }}
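
test_avoid_pii_hashing exercises the same mapper with hashing turned off, so the renamed snake_case keys are still produced but the raw values pass through unchanged, which is what the fixtures above assert. A pass-through hasher along these lines is one way such a test can be wired; the class name is illustrative, not the project's actual test double:

class PassThroughHasher:
    # Returns values unchanged, so the mapper's output keeps the raw PII
    # under the new snake_case keys, matching the assertions above.
    def hash_field(self, value):
        return value
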
2 changes: 1 addition & 1 deletion megalist_dataflow/requirements.txt
@@ -1,4 +1,4 @@
-googleads==24.1.0
+google-ads==11.0.0
 httplib2==0.17.4
 protobuf==3.13.0
 google-api-python-client==1.12.8
4 changes: 2 additions & 2 deletions megalist_dataflow/setup.py
@@ -16,11 +16,11 @@

 setuptools.setup(
     name='megalist_dataflow',
-    version='3.0',
+    version='4.0',
     author='Google',
     author_email='[email protected]',
     url='https://github.com/google/megalista/',
-    install_requires=['googleads==24.1.0', 'google-api-python-client==1.12.8',
+    install_requires=['google-ads==11.0.0', 'google-api-python-client==1.12.8',
                       'google-cloud-core==1.4.1', 'google-cloud-bigquery==1.27.2',
                       'google-cloud-datastore==1.13.1', 'aiohttp==3.6.2',
                       'google-cloud-storage==1.38.0'],
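
Both requirements.txt and setup.py swap the legacy googleads package (AdWords API) for google-ads, the client library for the newer Google Ads API, and bump the Dataflow package version to 4.0. The uploaders reach the new library through utils.get_ads_service, which is not shown in this diff; the sketch below is a rough guess at what such a helper does with google-ads 11.x, using the library's documented load_from_dict configuration keys. The function name, parameters and placeholder credentials are illustrative, not the project's actual implementation:

from google.ads.googleads.client import GoogleAdsClient

def get_ads_service(service_name: str, version: str, developer_token: str,
                    client_id: str, client_secret: str, refresh_token: str,
                    login_customer_id: str):
    # Illustrative helper: build a GoogleAdsClient from OAuth parameters and
    # return the requested service, e.g. 'GoogleAdsService' or 'ConversionUploadService'.
    client = GoogleAdsClient.load_from_dict({
        'developer_token': developer_token,
        'client_id': client_id,
        'client_secret': client_secret,
        'refresh_token': refresh_token,
        'login_customer_id': login_customer_id,
    })
    return client.get_service(service_name, version=version)

# Example usage with placeholder values:
# oc_service = get_ads_service('ConversionUploadService', 'v7', 'DEV_TOKEN',
#                              'CLIENT_ID', 'CLIENT_SECRET', 'REFRESH_TOKEN', '1234567890')
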
@@ -18,6 +18,8 @@
 from uploaders import utils
 from models.execution import Batch, DestinationType, Execution

+_DEFAULT_LOGGER: str = 'megalista.GoogleAdsOfflineConversionsUploader'
+

 class GoogleAdsOfflineUploaderDoFn(beam.DoFn):

@@ -27,10 +29,17 @@ def __init__(self, oauth_credentials, developer_token):
         self.developer_token = developer_token
         self.active = self.developer_token is not None

+    def _get_ads_service(self, customer_id: str):
+        return utils.get_ads_service('GoogleAdsService', 'v7',
+                                     self.oauth_credentials,
+                                     self.developer_token.get(),
+                                     customer_id)
+
     def _get_oc_service(self, customer_id):
-        return utils.get_ads_service('OfflineConversionFeedService', 'v201809',
+        return utils.get_ads_service('ConversionUploadService', 'v7',
                                      self.oauth_credentials,
-                                     self.developer_token.get(), customer_id)
+                                     self.developer_token.get(),
+                                     customer_id)

     def start_bundle(self):
         pass
@@ -56,25 +65,43 @@ def process(self, batch: Batch, **kwargs):
         execution = batch.execution
         self._assert_conversion_name_is_present(execution)

-        oc_service = self._get_oc_service(
-            execution.account_config.google_ads_account_id)
+        customer_id = execution.account_config.google_ads_account_id.replace('-', '')
+        oc_service = self._get_oc_service(customer_id)

+        resource_name = self._get_resource_name(customer_id, execution.destination.destination_metadata[0])
+
         self._do_upload(oc_service,
-                        execution.destination.destination_metadata[0],
+                        resource_name,
+                        customer_id,
                         batch.elements)

     @staticmethod
-    def _do_upload(oc_service, conversion_name, rows):
-        logging.getLogger().warning('Uploading {} rows to Google Ads'.format(
-            len(rows)))
-        upload_data = [{
-            'operator': 'ADD',
-            'operand': {
-                'conversionName': conversion_name,
-                'conversionTime': utils.format_date(conversion['time']),
-                'conversionValue': conversion['amount'],
-                'googleClickId': conversion['gclid']
-            }
+    def _do_upload(oc_service, conversion_resource_name, customer_id, rows):
+        logging.getLogger(_DEFAULT_LOGGER).info(f'Uploading {len(rows)} offline conversions on {conversion_resource_name} to Google Ads.')
+        conversions = [{
+            'conversion_action': conversion_resource_name,
+            'conversion_date_time': utils.format_date(conversion['time']),
+            'conversion_value': int(conversion['amount']),
+            'gclid': conversion['gclid']
         } for conversion in rows]
+
+        upload_data = {
+            'customer_id': customer_id,
+            'partial_failure': True,
+            'validate_only': False,
+            'conversions': conversions
+        }
+
+
+        response = oc_service.upload_click_conversions(request=upload_data)
+        utils.print_partial_error_messages(_DEFAULT_LOGGER, 'uploading offline conversions', response)

-        oc_service.mutate(upload_data)
+    def _get_resource_name(self, customer_id: str, name: str):
+        resource_name = None
+        service = self._get_ads_service(customer_id)
+        query = f"SELECT conversion_action.resource_name FROM conversion_action WHERE conversion_action.name = '{name}'"
+        response_query = service.search_stream(customer_id=customer_id, query=query)
+        for batch in response_query:
+            for row in batch.results:
+                resource_name = row.conversion_action.resource_name
+        return resource_name
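
In the rewritten uploader, process() strips the dashes from the Google Ads customer ID, resolves the conversion action's resource name with a GAQL query through GoogleAdsService, and sends the ClickConversion payloads to ConversionUploadService with partial_failure=True. With partial failures enabled the call returns normally even when individual conversions are rejected, so the response has to be inspected; utils.print_partial_error_messages (not shown in this diff) presumably does that. A rough sketch of that kind of check, assuming the plain-dict request shape used above; the function name, logger name and message wording are illustrative:

import logging

def log_partial_failures(logger_name: str, action: str, response) -> None:
    # Illustrative check: per-conversion problems are reported in
    # response.partial_failure_error, a google.rpc.Status with a numeric
    # code (0 means no error), a message and error details.
    error = getattr(response, 'partial_failure_error', None)
    if error and error.code != 0:
        logging.getLogger(logger_name).error(
            'Partial failures while %s: %s', action, error.message)

# Example: after `response = oc_service.upload_click_conversions(request=upload_data)`
# log_partial_failures(_DEFAULT_LOGGER, 'uploading offline conversions', response)
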
@@ -29,8 +29,8 @@

 @pytest.fixture
 def uploader(mocker):
-    mocker.patch('googleads.oauth2.GoogleRefreshTokenClient')
-    mocker.patch('googleads.adwords.AdWordsClient')
+    mocker.patch('google.ads.googleads.client.GoogleAdsClient')
+    mocker.patch('google.ads.googleads.oauth2')
     credential_id = StaticValueProvider(str, 'id')
     secret = StaticValueProvider(str, 'secret')
     access = StaticValueProvider(str, 'access')
@@ -56,7 +56,6 @@ def test_not_active(mocker, caplog):
     uploader_dofn._get_oc_service.assert_not_called()
     assert 'Skipping upload, parameters not configured.' in caplog.text

-
 def test_conversion_upload(mocker, uploader):
     mocker.patch.object(uploader, '_get_oc_service')
     conversion_name = 'user_list'
@@ -66,10 +65,10 @@ def test_conversion_upload(mocker, uploader):
     execution = Execution(_account_config, source, destination)

     time1 = '2020-04-09T14:13:55.0005'
-    time1_result = '20200409 141355 America/Sao_Paulo'
+    time1_result = '2020-04-09 14:13:55-03:00'

     time2 = '2020-04-09T13:13:55.0005'
-    time2_result = '20200409 131355 America/Sao_Paulo'
+    time2_result = '2020-04-09 13:13:55-03:00'

     batch = Batch(execution, [{
         'time': time1,
@@ -80,23 +79,21 @@ def test_conversion_upload(mocker, uploader):
         'amount': '234',
         'gclid': '567'
     }])
-
     uploader.process(batch)

-    uploader._get_oc_service.return_value.mutate.assert_any_call([{
-        'operator': 'ADD',
-        'operand': {
-            'conversionName': conversion_name,
-            'conversionTime': time1_result,
-            'conversionValue': '123',
-            'googleClickId': '456'
-        }
-    }, {
-        'operator': 'ADD',
-        'operand': {
-            'conversionName': conversion_name,
-            'conversionTime': time2_result,
-            'conversionValue': '234',
-            'googleClickId': '567'
-        }
-    }])
+    uploader._get_oc_service.return_value.upload_click_conversions.assert_any_call(request = {
+        'customer_id': 'account_id',
+        'partial_failure': True,
+        'validate_only': False,
+        'conversions': [{
+            'conversion_action': None,
+            'conversion_date_time': time1_result,
+            'conversion_value': 123,
+            'gclid': '456'
+        }, {
+            'conversion_action': None,
+            'conversion_date_time': time2_result,
+            'conversion_value': 234,
+            'gclid': '567'
+        }]
+    })