Skip to content

Commit

Permalink
Fix newlines (#132)
Browse files Browse the repository at this point in the history
* fix newline handling for tokens

* improve newline handling

* fix package uploads

* a little more testing
  • Loading branch information
mcg1969 authored Feb 11, 2025
1 parent 933ba74 commit 55637a9
Show file tree
Hide file tree
Showing 6 changed files with 35 additions and 14 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -167,4 +167,4 @@ jobs:
source $CONDA/bin/activate
conda install anaconda-client
[[ "$GITHUB_REF" =~ ^refs/tags/ ]] || export LABEL="--label dev"
anaconda --verbose --token $ANACONDA_TOKEN upload --user ctools $LABEL conda-bld/*/*.tar.bz2 --force
anaconda --verbose --token $ANACONDA_TOKEN upload --user ctools $LABEL conda-bld/*/*.{conda,tar.bz2} --force
2 changes: 1 addition & 1 deletion anaconda_anon_usage/tokens.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def _system_token(fname, what):
fpath = join(path, fname)
if not isfile(fpath):
continue
t_tokens = _read_file(fpath, what + "token")
t_tokens = _read_file(fpath, what + "token", single_line=True)
if t_tokens:
for token in t_tokens.split("/"):
if token not in tokens:
Expand Down
16 changes: 11 additions & 5 deletions anaconda_anon_usage/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@
# Causes token writes to fail (for testing). The string should contain
# the token types that should fail; c, e
WRITE_CHAOS = os.environ.get("ANACONDA_ANON_USAGE_WRITE_CHAOS") or ""
# Causes token writes to include a trailing newline (for testing).
WRITE_NEWLINE = False

WRITE_SUCCESS = 0
WRITE_DEFER = 1
Expand Down Expand Up @@ -116,6 +118,8 @@ def _write_attempt(must_exist, fpath, client_token, emulate_fail=False):
if emulate_fail:
raise OSError(errno.EROFS, "Testing permissions issues")
os.makedirs(dirname(fpath), exist_ok=True)
if WRITE_NEWLINE:
client_token = client_token + "\n# Test comment"
with open(fpath, "w") as fp:
fp.write(client_token)
_debug("Token saved: %s", fpath)
Expand Down Expand Up @@ -156,7 +160,7 @@ def _deferred_exists(
return token


def _read_file(fpath, what, must_exist=None, read_only=False):
def _read_file(fpath, what, read_only=False, single_line=False):
"""
Implements the saved token functionality. If the specified
file exists, and contains a token with the right format,
Expand All @@ -178,9 +182,13 @@ def _read_file(fpath, what, must_exist=None, read_only=False):
_debug("%s file is not present", what)
else:
try:
# Use just the first line of the file, if it exists
with open(fpath) as fp:
data = fp.read()
if single_line:
# Use just the first non-blank line of the file
data = data.strip()
if data:
data = data.splitlines()[0]
_debug("Retrieved %s: %s", what, data)
return data
except Exception as exc:
Expand All @@ -196,9 +204,7 @@ def _saved_token(fpath, what, must_exist=None, read_only=False):
"""
global DEFERRED
what = what + " token"
client_token = _read_file(fpath, what) or ""
# Just use the first line of the file
client_token = "".join(client_token.splitlines()[:1])
client_token = _read_file(fpath, what, single_line=True) or ""
if not read_only and len(client_token) < TOKEN_LENGTH:
if len(client_token) > 0:
_debug("Generating longer %s", what)
Expand Down
4 changes: 2 additions & 2 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,9 @@ def system_tokens():
otoken = utils._random_token()
mtoken = utils._random_token()
with open(dirname(tpaths[1]) + "/org_token", "w") as fp:
fp.write(otoken)
fp.write(otoken + "\n# Anaconda organization token\n")
with open(dirname(tpaths[1]) + "/machine_token", "w") as fp:
fp.write(mtoken)
fp.write(mtoken + "\n# Anaconda machine token\n")
yield (otoken, mtoken)


Expand Down
3 changes: 3 additions & 0 deletions tests/unit/test_patch.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import re

from conda.base.context import context

from anaconda_anon_usage import patch, tokens
Expand All @@ -11,6 +13,7 @@
def _assert_has_expected_tokens(must=BASIC, mustnot=()):
patch.main(plugin=True)
assert context.user_agent is not None
assert re.match(r"^[A-Za-z0-9\-_.~!#$&'()*+,/:;=?@[\] ]+$", context.user_agent)
tokens = {
tok.split("/", 1)[0] for tok in context.user_agent.split(" ") if "/" in tok
}
Expand Down
22 changes: 17 additions & 5 deletions tests/unit/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,23 @@ def test_random_token():
def test_saved_token_saving(tmpdir):
    # Verify that _saved_token creates a token file on disk and that the
    # value persisted there round-trips back equal to the returned token.
    # NOTE(review): this span looks like a unified-diff overlay — both the
    # pre-change assertions (direct file read) and the post-change ones
    # (utils._read_file with read_only=True) appear in sequence; confirm
    # against the actual test file before relying on the exact body.
    token_path = tmpdir.join("aau_token")
    token_saved = utils._saved_token(token_path, "test")
    assert exists(token_path)
    with open(token_path) as token_file:
        token_stored = token_file.read()
    # 22 is presumably utils.TOKEN_LENGTH — TODO confirm against utils.py
    assert len(token_stored) == 22
    assert token_stored == token_saved
    assert len(token_saved) == 22
    token_stored = utils._read_file(token_path, "test", read_only=True)
    assert token_stored and token_stored == token_saved


def test_saved_token_newline(monkeypatch, tmpdir):
    # Force token writes to append a trailing newline plus comment text
    # (WRITE_NEWLINE test hook), emulating a hand-edited token file.
    monkeypatch.setattr(utils, "WRITE_NEWLINE", True)
    token_path = tmpdir.join("aau_token")
    token_saved = utils._saved_token(token_path, "test")
    assert len(token_saved) == 22
    # A raw read returns the token plus the extra trailing content...
    raw_contents = utils._read_file(token_path, "test", read_only=True)
    assert raw_contents and raw_contents != token_saved
    assert raw_contents.splitlines()[0] == token_saved
    # ...while single_line mode reduces the file back to just the token.
    first_line = utils._read_file(token_path, "test", read_only=True, single_line=True)
    assert first_line and first_line == token_saved


def test_saved_token_exception(tmpdir):
Expand Down

0 comments on commit 55637a9

Please sign in to comment.